03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc918
7c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x238, 0xb, 0x0, 0x1, [{0x1a4, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x198, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x194, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x5}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 
0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800)
00:43:57 executing program 0:
r0 = socket$nl_netfilter(0x10, 0x3, 0xc)
sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x95cc, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95a0, 0x3, 0x0, 0x1, [{0x4}, {0x4d78, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1358, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x42, 0x1, 
"2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb5
6461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29
c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 
0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x368, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x5}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x3b}, 
@NFTA_VERDICT_CODE, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x9d, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, 
"385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0xfffffffffffffdfb}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x95cc}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:57 executing program 4: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x6e) 00:43:57 executing program 2: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0}, 0x90) [ 2638.021605][ T4069] 8021q: adding VLAN 0 to HW filter on device batadv0 00:43:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = mmap$IORING_OFF_SQ_RING(&(0x7f0000000000/0x4000)=nil, 0x4000, 0x1, 0x4000832, 0xffffffffffffffff, 0x0) syz_io_uring_submit(r1, 0x0, 0x0) r2 = syz_init_net_socket$nfc_llcp(0x27, 0x3, 0x1) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)=0x0) r5 = socket(0x11, 0x3, 0x0) sendmmsg$nfc_llcp(r5, &(0x7f00000016c0)=[{&(0x7f0000000040)={0x27, r4, 0x0, 0x0, 0x0, 0x0, "18f5ff89de6847661dba79f74a7cf69cf0d36d660c3510d5fcc532b0fafc43d906ae154b69de5c5425ab3261d94b6da7b3dd8aea92c2ea8f66fa6ae87d03c4"}, 0x60, &(0x7f00000001c0)=[{&(0x7f00000000c0)="8c", 0x1}], 0x1}, {&(0x7f0000000300)={0x27, 0x0, 0x0, 0x0, 0x0, 0x0, "d67333796b09325594677eb0ebd7d2a9c22d26004fa83daef9cc37534506b0bf13cbd8f158e62a2a2b0c5e1ea67fc6d651e3bf20bca16bba7984caf898531f"}, 0x60, &(0x7f0000000740)}], 0x2, 0x0) connect$nfc_llcp(r2, &(0x7f0000000600)={0x27, r4, 0x1, 0x2, 0x3f, 0x7f, "b060de8fa9e885f202d703a8a273408a3d07e3bce0fb0df9a75a77768b0634325ea1866c93d9ef6ed61d38ad78c5eea38269a9dc9944fcc9c740c072169340", 0x3}, 0x60) r6 = mmap$IORING_OFF_SQES(&(0x7f0000ffb000/0x3000)=nil, 0x3000, 0x0, 0x13, 0xffffffffffffffff, 0x10000000) syz_io_uring_submit(r1, r6, &(0x7f00000005c0)=@IORING_OP_READV=@pass_iovec={0x1, 0x60, 0x2004, @fd=r5, 0x0, &(0x7f0000000540)=[{&(0x7f0000000040)=""/125, 0x7d}, {&(0x7f00000000c0)=""/142, 0x8e}, {&(0x7f0000000180)=""/46, 
0x2e}, {&(0x7f00000001c0)=""/80, 0x50}, {&(0x7f0000000240)=""/212, 0xd4}, {&(0x7f0000000340)=""/48, 0x30}, {&(0x7f0000000380)=""/192, 0xc0}, {&(0x7f0000000440)=""/226, 0xe2}], 0x8, 0x2, 0x1}) 00:43:58 executing program 1: r0 = socket$nl_generic(0x10, 0x3, 0x10) sendmsg$MPTCP_PM_CMD_FLUSH_ADDRS(r0, &(0x7f0000000100)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x400000}, 0xc, &(0x7f0000000080)={&(0x7f0000000040)={0x24, 0x0, 0x10, 0x70bd2c, 0x25dfdbfe, {}, [@MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8, 0x2, 0x8}, @MPTCP_PM_ATTR_TOKEN={0x8, 0x4, 0x3ff}]}, 0x24}, 0x1, 0x0, 0x0, 0x40}, 0x4055) syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) (async, rerun: 32) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) (rerun: 32) r1 = openat$binder_debug(0xffffffffffffff9c, &(0x7f0000000140)='/sys/kernel/debug/binder/transactions\x00', 0x0, 0x0) syz_genetlink_get_family_id$mptcp(&(0x7f00000000c0), r1) 00:43:58 executing program 4: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x960c, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e0, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, 
"f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b
9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c83627
35a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, 
"8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b84961
8357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b
84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15
ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa
65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, 
[@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, "d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeae
ddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa827e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f2
9fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec977d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 
'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, 
@NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, "56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623
a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096cf7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3
d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x238, 0xb, 0x0, 0x1, [{0x1a4, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x198, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x194, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x5}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 
0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 2: r0 = syz_open_dev$loop(&(0x7f0000000000), 0x100000001, 0x0) r1 = socket$nl_generic(0x10, 0x3, 0x10) ioctl$LOOP_CHANGE_FD(r0, 0x4c06, r1) bpf$PROG_LOAD_XDP(0x5, &(0x7f0000000680)={0x6, 0xf, &(0x7f00000004c0)=@ringbuf, &(0x7f0000000580)='GPL\x00', 0x0, 0x0, 0x0, 0x0, 0x0, '\x00', 0x0, 0x25, 0xffffffffffffffff, 0x8, 0x0, 0x0, 0x10, &(0x7f0000000600), 0x10}, 0x90) (async) bpf$PROG_LOAD_XDP(0x5, &(0x7f0000000680)={0x6, 0xf, &(0x7f00000004c0)=@ringbuf, &(0x7f0000000580)='GPL\x00', 0x0, 0x0, 0x0, 0x0, 0x0, '\x00', 0x0, 0x25, 0xffffffffffffffff, 0x8, 0x0, 0x0, 0x10, &(0x7f0000000600), 0x10}, 0x90) 00:43:58 executing program 4: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x6e) 00:43:58 executing program 2: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x960c, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e0, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 
0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b
1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2
a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d13
78c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be259
47e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 
0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf008
74c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db
35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x238, 0xb, 0x0, 0x1, [{0x1a4, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x198, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x194, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x5}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 
0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 0: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x2}, 0x90) 00:43:58 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = mmap$IORING_OFF_SQ_RING(&(0x7f0000000000/0x4000)=nil, 0x4000, 0x1, 0x4000832, 0xffffffffffffffff, 0x0) syz_io_uring_submit(r1, 0x0, 0x0) r2 = syz_init_net_socket$nfc_llcp(0x27, 0x3, 0x1) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)=0x0) r5 = socket(0x11, 0x3, 0x0) sendmmsg$nfc_llcp(r5, &(0x7f00000016c0)=[{&(0x7f0000000040)={0x27, r4, 0x0, 0x0, 0x0, 0x0, "18f5ff89de6847661dba79f74a7cf69cf0d36d660c3510d5fcc532b0fafc43d906ae154b69de5c5425ab3261d94b6da7b3dd8aea92c2ea8f66fa6ae87d03c4"}, 0x60, &(0x7f00000001c0)=[{&(0x7f00000000c0)="8c", 0x1}], 0x1}, {&(0x7f0000000300)={0x27, 0x0, 0x0, 0x0, 0x0, 0x0, "d67333796b09325594677eb0ebd7d2a9c22d26004fa83daef9cc37534506b0bf13cbd8f158e62a2a2b0c5e1ea67fc6d651e3bf20bca16bba7984caf898531f"}, 0x60, &(0x7f0000000740)}], 0x2, 0x0) connect$nfc_llcp(r2, &(0x7f0000000600)={0x27, r4, 0x1, 0x2, 0x3f, 0x7f, "b060de8fa9e885f202d703a8a273408a3d07e3bce0fb0df9a75a77768b0634325ea1866c93d9ef6ed61d38ad78c5eea38269a9dc9944fcc9c740c072169340", 0x3}, 0x60) r6 = mmap$IORING_OFF_SQES(&(0x7f0000ffb000/0x3000)=nil, 0x3000, 0x0, 0x13, 0xffffffffffffffff, 0x10000000) syz_io_uring_submit(r1, r6, &(0x7f00000005c0)=@IORING_OP_READV=@pass_iovec={0x1, 0x60, 0x2004, @fd=r5, 0x0, &(0x7f0000000540)=[{&(0x7f0000000040)=""/125, 0x7d}, {&(0x7f00000000c0)=""/142, 0x8e}, {&(0x7f0000000180)=""/46, 0x2e}, {&(0x7f00000001c0)=""/80, 0x50}, {&(0x7f0000000240)=""/212, 0xd4}, {&(0x7f0000000340)=""/48, 0x30}, {&(0x7f0000000380)=""/192, 0xc0}, {&(0x7f0000000440)=""/226, 0xe2}], 0x8, 0x2, 0x1}) openat$nci(0xffffffffffffff9c, 
&(0x7f0000000e00), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) mmap$IORING_OFF_SQ_RING(&(0x7f0000000000/0x4000)=nil, 0x4000, 0x1, 0x4000832, 0xffffffffffffffff, 0x0) (async) syz_io_uring_submit(r1, 0x0, 0x0) (async) syz_init_net_socket$nfc_llcp(0x27, 0x3, 0x1) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) (async) socket(0x11, 0x3, 0x0) (async) sendmmsg$nfc_llcp(r5, &(0x7f00000016c0)=[{&(0x7f0000000040)={0x27, r4, 0x0, 0x0, 0x0, 0x0, "18f5ff89de6847661dba79f74a7cf69cf0d36d660c3510d5fcc532b0fafc43d906ae154b69de5c5425ab3261d94b6da7b3dd8aea92c2ea8f66fa6ae87d03c4"}, 0x60, &(0x7f00000001c0)=[{&(0x7f00000000c0)="8c", 0x1}], 0x1}, {&(0x7f0000000300)={0x27, 0x0, 0x0, 0x0, 0x0, 0x0, "d67333796b09325594677eb0ebd7d2a9c22d26004fa83daef9cc37534506b0bf13cbd8f158e62a2a2b0c5e1ea67fc6d651e3bf20bca16bba7984caf898531f"}, 0x60, &(0x7f0000000740)}], 0x2, 0x0) (async) connect$nfc_llcp(r2, &(0x7f0000000600)={0x27, r4, 0x1, 0x2, 0x3f, 0x7f, "b060de8fa9e885f202d703a8a273408a3d07e3bce0fb0df9a75a77768b0634325ea1866c93d9ef6ed61d38ad78c5eea38269a9dc9944fcc9c740c072169340", 0x3}, 0x60) (async) mmap$IORING_OFF_SQES(&(0x7f0000ffb000/0x3000)=nil, 0x3000, 0x0, 0x13, 0xffffffffffffffff, 0x10000000) (async) syz_io_uring_submit(r1, r6, &(0x7f00000005c0)=@IORING_OP_READV=@pass_iovec={0x1, 0x60, 0x2004, @fd=r5, 0x0, &(0x7f0000000540)=[{&(0x7f0000000040)=""/125, 0x7d}, {&(0x7f00000000c0)=""/142, 0x8e}, {&(0x7f0000000180)=""/46, 0x2e}, {&(0x7f00000001c0)=""/80, 0x50}, {&(0x7f0000000240)=""/212, 0xd4}, {&(0x7f0000000340)=""/48, 0x30}, {&(0x7f0000000380)=""/192, 0xc0}, {&(0x7f0000000440)=""/226, 0xe2}], 0x8, 0x2, 0x1}) (async) 00:43:58 executing program 0: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x9610, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e4, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, 
"2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb5
6461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29
c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 
0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x360, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x23c, 0xb, 0x0, 0x1, [{0x1a8, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x19c, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x198, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x48, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x9610}, 0x1, 0x0, 0x0, 0x10}, 0x800) [ 2638.284701][ T4069] veth0_vlan: entered promiscuous mode 00:43:58 executing program 0: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x3}, 0x90) [ 2638.306398][ T8338] hsr_slave_0: left promiscuous mode 00:43:58 executing program 4: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x9610, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e4, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, 
@NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b239
73f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a2
31a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 
0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x360, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x23c, 0xb, 0x0, 0x1, [{0x1a8, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x19c, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x198, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x48, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x9610}, 0x1, 0x0, 0x0, 0x10}, 0x800)
[ 2638.340165][ T8338] hsr_slave_1: left promiscuous mode
00:43:58 executing program 4:
syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff)
openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x74)
00:43:58 executing program 2:
r0 = socket$nl_netfilter(0x10, 0x3, 0xc)
sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x9610, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e4, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, 
"2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb5
6461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29
c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 
0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x360, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x23c, 0xb, 0x0, 0x1, [{0x1a8, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x19c, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x198, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x48, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x9610}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 0: bpf$PROG_LOAD_XDP(0x5, &(0x7f0000000180)={0x6, 0xf, &(0x7f0000000000)=ANY=[@ANYBLOB="18678e9f5f8dddcd28000b99858da9ad104f0483cf86ee52080a29d4679831e21329c365cb97ed3863e7b606f93e9b2e8518e3f5845cbe96ff214304f13d8fc0b604a049c5a1171ccabd94f1b02f3c28150bc1d9da4410b5b89e6893a223a1cf31f45a4c70dcd9079917ebe81173b9db21a7c18b9b2f33d936512f3c43d47ac7abc9cfaafbf76fa1c74e5721b250c3fcb1159f99ed4a94f4918531411a05d4fe932c0aa71048d1882a000b280c8ca1ecf5538e7b5c5682942f7074f5edd1784c8b71043bc1a4325ca4994579dd6154a403abddf1414b8687db69cc1bea9e", @ANYRES32, @ANYBLOB="0000000000000000b702000014000000b7030000000000008500000083000000bf09000000000000550901000000e5009500000000000000bf91000000000000b7020000000000008500000000000000b7000000000000009500000000000000"], &(0x7f0000000580)='GPL\x00', 0x0, 0x0, 0x0, 0x0, 0x0, '\x00', 0x0, 0x25, 0xffffffffffffffff, 0x8, 0x0, 0x0, 0x10, &(0x7f0000000600)={0x0, 0x0, 0x1000000}, 0x10}, 0x90) [ 2638.399008][T32158] Bluetooth: hci0: command 0x041b tx timeout [ 2638.429265][ T8338] batman_adv: batadv0: Interface deactivated: batadv_slave_0 [ 2638.431339][ T8338] batman_adv: batadv0: Removing interface: batadv_slave_0 [ 2638.448478][ T8338] batman_adv: batadv0: Interface deactivated: batadv_slave_1 00:43:58 executing program 0: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x4}, 0x90) [ 2638.462618][ T8338] batman_adv: batadv0: Removing interface: batadv_slave_1 00:43:58 executing program 2: r0 = socket$nl_generic(0x10, 0x3, 0x10) r1 = syz_genetlink_get_family_id$mptcp(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$MPTCP_PM_CMD_SUBFLOW_DESTROY(r0, &(0x7f00000003c0)={&(0x7f0000000140), 0xc, &(0x7f0000000180)={&(0x7f0000000300)={0x84, r1, 0x200, 0x70bd25, 0x25dfdbff, {}, [@MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x2}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x4}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x38, 0x6, 0x0, 0x1, 
[@MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e20}, @MPTCP_PM_ADDR_ATTR_ADDR6={0x14, 0x4, @empty}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @rand_addr=0x64010102}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @empty}, @MPTCP_PM_ADDR_ATTR_FLAGS={0x8, 0x6, 0x4}]}, @MPTCP_PM_ATTR_TOKEN={0x8, 0x4, 0x4}, @MPTCP_PM_ATTR_ADDR={0x4}, @MPTCP_PM_ATTR_TOKEN={0x8, 0x4, 0x3}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8}, @MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x80}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x5}]}, 0x84}, 0x1, 0x0, 0x0, 0x4000010}, 0x20004044) ioctl$sock_ipv6_tunnel_SIOCGET6RD(0xffffffffffffffff, 0x89f8, &(0x7f0000000540)={'tunl0\x00', &(0x7f00000006c0)=ANY=[@ANYBLOB="74756e6c3000000000000000a80e17f03bd9ba9e1dc7c08dee97e0740b338c2d3947258a8c9bec06e5006f9b701b86b834f728c0a89300c53d29ede2336f69530000000000", @ANYRES32=0x0, @ANYBLOB="7800004000000009000000054cd800b000640000095e9078ffffffffac1e0101830fa9ac1414bbe000000100000000072b6c0a010101ac1414aaac1ed2016401010064010102ac141416ac1414aa00000000ac14140fac1414aa008905248145441050f0000000080000000100000000862b0000000207071514668ab5020b2991725445005319b5000edfe63591e79bef17c118de660505d454740707efffffffff9404010001441405836401010200000007e00000020000000300"]}) sendmsg$MPTCP_PM_CMD_SUBFLOW_CREATE(r0, &(0x7f0000000680)={&(0x7f0000000400)={0x10, 0x0, 0x0, 0x40000}, 0xc, &(0x7f0000000640)={&(0x7f0000000580)={0x8c, r1, 0x1, 0x70bd2b, 0x25dfdbfe, {}, [@MPTCP_PM_ATTR_LOC_ID={0x5}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x1}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x24, 0x6, 0x0, 0x1, [@MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0xa}, @MPTCP_PM_ADDR_ATTR_IF_IDX={0x8, 0x7, r2}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0x2}, @MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e24}]}, @MPTCP_PM_ATTR_ADDR={0x24, 0x1, 0x0, 0x1, [@MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e21}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0x2}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0xa}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @multicast1}]}, @MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x4}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x6}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8, 0x2, 0x3}]}, 0x8c}, 0x1, 0x0, 0x0, 0x24004014}, 0x0) syz_genetlink_get_family_id$mptcp(&(0x7f0000000440), r0) ioctl$sock_ipv4_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000100)={'erspan0\x00', &(0x7f0000000080)={'gre0\x00', 0x0, 0x0, 0x20, 0x6, 0x2, {{0x10, 0x4, 0x0, 0xf, 0x40, 0x65, 0x0, 0x80, 0x4, 0x0, @broadcast, @remote, {[@cipso={0x86, 0x18, 0x2, [{0x2, 0x12, "5070106d312443ad3771e8b2ba39a84e"}]}, @ra={0x94, 0x4, 0x1000}, @generic={0x89, 0x5, "44e14e"}, @ssrr={0x89, 0x7, 0x97, [@remote]}, @end, @end]}}}}}) sendmsg$MPTCP_PM_CMD_SET_FLAGS(r0, &(0x7f0000000200)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x802042}, 0xc, &(0x7f00000001c0)={&(0x7f0000000280)=ANY=[@ANYBLOB="600000005343bdcca94cb170428409811c0546", @ANYRES16=r1, @ANYBLOB="000828bd7000fbdbdf25070000002c00068008000700", @ANYRES32=r3, @ANYBLOB="050002001f000000060005004e240000080006001300000005000200ff0000000800030004000000050005000100000008000400470400000800040002000000"], 0x60}, 0x1, 0x0, 0x0, 0x4000044}, 0x1) ioctl$ifreq_SIOCGIFINDEX_wireguard(r0, 0x8933, &(0x7f0000000240)={'wg2\x00'}) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) [ 2638.476336][ T8338] bridge_slave_1: left allmulticast mode [ 2638.491397][ T8338] bridge_slave_1: left promiscuous mode [ 2638.504249][ T8338] bridge0: port 2(bridge_slave_1) entered disabled state 00:43:58 executing program 0: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, 
&(0x7f0000000180)={&(0x7f0000009800)={0x95cc, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95a0, 0x3, 0x0, 0x1, [{0x4}, {0x4d78, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1358, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x42, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b
1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2
a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d13
78c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be259
47e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 
0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf008
74c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db
35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x368, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0xf9c}, @NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x3b}, @NFTA_VERDICT_CODE, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x9d, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, 
@NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0xfffffffffffffdfb}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x95cc}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 4: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x5}, 0x90) [ 2638.541101][ T8338] bridge_slave_0: left allmulticast mode [ 2638.547710][ T8338] bridge_slave_0: left promiscuous mode [ 2638.561499][ T8338] bridge0: port 1(bridge_slave_0) entered disabled state 00:43:58 executing program 0: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x7a) 00:43:58 executing program 4: bpf$PROG_LOAD_XDP(0x5, &(0x7f0000000180)={0x6, 0xf, &(0x7f0000000000)=ANY=[@ANYBLOB="18678e9f5f8dddcd28000b99858da9ad104f0483cf86ee52080a29d4679831e21329c365cb97ed3863e7b606f93e9b2e8518e3f5845cbe96ff214304f13d8fc0b604a049c5a1171ccabd94f1b02f3c28150bc1d9da4410b5b89e6893a223a1cf31f45a4c70dcd9079917ebe81173b9db21a7c18b9b2f33d936512f3c43d47ac7abc9cfaafbf76fa1c74e5721b250c3fcb1159f99ed4a94f4918531411a05d4fe932c0aa71048d1882a000b280c8ca1ecf5538e7b5c5682942f7074f5edd1784c8b71043bc1a4325ca4994579dd6154a403abddf1414b8687db69cc1bea9e", @ANYRES32, @ANYBLOB="0000000000000000b702000014000000b7030000000000008500000083000000bf09000000000000550901000000e5009500000000000000bf91000000000000b7020000000000008500000000000000b7000000000000009500000000000000"], &(0x7f0000000580)='GPL\x00', 0x0, 0x0, 0x0, 0x0, 0x0, '\x00', 0x0, 0x25, 0xffffffffffffffff, 0x8, 0x0, 0x0, 0x10, &(0x7f0000000600)={0x0, 0x0, 0x1000000}, 0x10}, 0x90) 00:43:58 executing program 2: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x960c, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e0, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, 
[@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b
1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2
a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d13
78c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be259
47e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 
0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf008
74c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db
35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, 
@NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, 
@NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 4: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x6}, 0x90) 00:43:58 executing program 2: socket$nl_generic(0x10, 0x3, 0x10) (async) r0 = socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$mptcp(&(0x7f0000000040), 0xffffffffffffffff) (async) r1 = syz_genetlink_get_family_id$mptcp(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$MPTCP_PM_CMD_SUBFLOW_DESTROY(r0, &(0x7f00000003c0)={&(0x7f0000000140), 0xc, &(0x7f0000000180)={&(0x7f0000000300)={0x84, r1, 0x200, 0x70bd25, 0x25dfdbff, {}, [@MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x2}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x4}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x38, 0x6, 0x0, 0x1, [@MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e20}, @MPTCP_PM_ADDR_ATTR_ADDR6={0x14, 0x4, @empty}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @rand_addr=0x64010102}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @empty}, @MPTCP_PM_ADDR_ATTR_FLAGS={0x8, 0x6, 0x4}]}, @MPTCP_PM_ATTR_TOKEN={0x8, 0x4, 0x4}, @MPTCP_PM_ATTR_ADDR={0x4}, @MPTCP_PM_ATTR_TOKEN={0x8, 0x4, 0x3}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8}, @MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x80}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x5}]}, 0x84}, 0x1, 0x0, 0x0, 0x4000010}, 0x20004044) ioctl$sock_ipv6_tunnel_SIOCGET6RD(0xffffffffffffffff, 0x89f8, &(0x7f0000000540)={'tunl0\x00', &(0x7f00000006c0)=ANY=[@ANYBLOB="74756e6c3000000000000000a80e17f03bd9ba9e1dc7c08dee97e0740b338c2d3947258a8c9bec06e5006f9b701b86b834f728c0a89300c53d29ede2336f69530000000000", @ANYRES32=0x0, @ANYBLOB="7800004000000009000000054cd800b000640000095e9078ffffffffac1e0101830fa9ac1414bbe000000100000000072b6c0a010101ac1414aaac1ed2016401010064010102ac141416ac1414aa00000000ac14140fac1414aa008905248145441050f0000000080000000100000000862b0000000207071514668ab5020b2991725445005319b5000edfe63591e79bef17c118de660505d454740707efffffffff9404010001441405836401010200000007e00000020000000300"]}) (async) 
ioctl$sock_ipv6_tunnel_SIOCGET6RD(0xffffffffffffffff, 0x89f8, &(0x7f0000000540)={'tunl0\x00', &(0x7f00000006c0)=ANY=[@ANYBLOB="74756e6c3000000000000000a80e17f03bd9ba9e1dc7c08dee97e0740b338c2d3947258a8c9bec06e5006f9b701b86b834f728c0a89300c53d29ede2336f69530000000000", @ANYRES32=0x0, @ANYBLOB="7800004000000009000000054cd800b000640000095e9078ffffffffac1e0101830fa9ac1414bbe000000100000000072b6c0a010101ac1414aaac1ed2016401010064010102ac141416ac1414aa00000000ac14140fac1414aa008905248145441050f0000000080000000100000000862b0000000207071514668ab5020b2991725445005319b5000edfe63591e79bef17c118de660505d454740707efffffffff9404010001441405836401010200000007e00000020000000300"]}) sendmsg$MPTCP_PM_CMD_SUBFLOW_CREATE(r0, &(0x7f0000000680)={&(0x7f0000000400)={0x10, 0x0, 0x0, 0x40000}, 0xc, &(0x7f0000000640)={&(0x7f0000000580)={0x8c, r1, 0x1, 0x70bd2b, 0x25dfdbfe, {}, [@MPTCP_PM_ATTR_LOC_ID={0x5}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x1}, @MPTCP_PM_ATTR_ADDR_REMOTE={0x24, 0x6, 0x0, 0x1, [@MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0xa}, @MPTCP_PM_ADDR_ATTR_IF_IDX={0x8, 0x7, r2}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0x2}, @MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e24}]}, @MPTCP_PM_ATTR_ADDR={0x24, 0x1, 0x0, 0x1, [@MPTCP_PM_ADDR_ATTR_PORT={0x6, 0x5, 0x4e21}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0x2}, @MPTCP_PM_ADDR_ATTR_FAMILY={0x6, 0x1, 0xa}, @MPTCP_PM_ADDR_ATTR_ADDR4={0x8, 0x3, @multicast1}]}, @MPTCP_PM_ATTR_LOC_ID={0x5, 0x5, 0x4}, @MPTCP_PM_ATTR_SUBFLOWS={0x8, 0x3, 0x6}, @MPTCP_PM_ATTR_RCV_ADD_ADDRS={0x8, 0x2, 0x3}]}, 0x8c}, 0x1, 0x0, 0x0, 0x24004014}, 0x0) syz_genetlink_get_family_id$mptcp(&(0x7f0000000440), r0) ioctl$sock_ipv4_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000100)={'erspan0\x00', &(0x7f0000000080)={'gre0\x00', 0x0, 0x0, 0x20, 0x6, 0x2, {{0x10, 0x4, 0x0, 0xf, 0x40, 0x65, 0x0, 0x80, 0x4, 0x0, @broadcast, @remote, {[@cipso={0x86, 0x18, 0x2, [{0x2, 0x12, "5070106d312443ad3771e8b2ba39a84e"}]}, @ra={0x94, 0x4, 0x1000}, @generic={0x89, 0x5, "44e14e"}, @ssrr={0x89, 0x7, 0x97, [@remote]}, @end, @end]}}}}}) sendmsg$MPTCP_PM_CMD_SET_FLAGS(r0, &(0x7f0000000200)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x802042}, 0xc, &(0x7f00000001c0)={&(0x7f0000000280)=ANY=[@ANYBLOB="600000005343bdcca94cb170428409811c0546", @ANYRES16=r1, @ANYBLOB="000828bd7000fbdbdf25070000002c00068008000700", @ANYRES32=r3, @ANYBLOB="050002001f000000060005004e240000080006001300000005000200ff0000000800030004000000050005000100000008000400470400000800040002000000"], 0x60}, 0x1, 0x0, 0x0, 0x4000044}, 0x1) ioctl$ifreq_SIOCGIFINDEX_wireguard(r0, 0x8933, &(0x7f0000000240)={'wg2\x00'}) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) 00:43:58 executing program 0: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x960c, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e0, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, 
@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b
1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2
a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d13
78c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be259
47e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 
0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf008
74c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db
35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, 
@NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, 
@NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:58 executing program 4: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0xc0) [ 2638.729910][ T8338] veth1_macvtap: left promiscuous mode [ 2638.731486][ T8338] veth0_macvtap: left promiscuous mode [ 2638.733031][ T8338] veth1_vlan: left promiscuous mode [ 2638.734434][ T8338] veth0_vlan: left promiscuous mode 00:43:58 executing program 0: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x7}, 0x90) 00:43:58 executing program 4: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x960c, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95e0, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 
0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d3
47ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a
93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, 
"71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x35c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, 
@NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, 
@NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x960c}, 0x1, 0x0, 0x0, 0x10}, 0x800)
00:43:58 executing program 0: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x108)
00:43:58 executing program 2: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x8}, 0x90)
00:43:58 executing program 4: r0 = semget$private(0x0, 0x3, 0x0) semop(r0, &(0x7f0000000280)=[{0x0, 0x0, 0x1800}], 0x1) semctl$SETALL(r0, 0x0, 0x11, &(0x7f0000000000)=[0x1, 0x3, 0x8, 0x3ff, 0x7f]) r1 = semget(0x1, 0x5, 0x120) semctl$SETALL(r1, 0x0, 0x11, &(0x7f0000000040)=[0x8d, 0x0, 0x5, 0x5, 0x4, 0x0, 0x800])
00:43:58 executing program 2: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x95cc, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x95a0, 0x3, 0x0, 0x1, [{0x4}, {0x4d78, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1358, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x42, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b
1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2
a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, 
@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d13
78c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be259
47e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 
0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf008
74c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db
35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x1250, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x1240, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x3c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0x43ce6c2ab07e3ca0}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x100}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x9}]}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"592f8dc659201fe41299b6b05fe36d1051e39727384dd926e92520475da2a1aed74f72082f59ce62da0155ca06c968a14d02283d7527513e3cf98901e1e0bf5706e9ca02ac2ceb996bb0bd281c3fd982b6f9a4f1ccbe1afb37b3d8e771f6af777e147b4e0320a1b77a32996c01033f4515db4be0b2a110b2a5341c80621da8b787db23a839d32d9d5ab446fb77235a803162670b6f3c5523503dc00d882abc78dccf1706b54e4803c09ba95ea74319e2e1b434f18d4dab034e4bd1165b8d0f393330ab4cb1718bcf5362d2decd78f935b98f4d81f159de70988a89b464d9f3fca7c450d51a58f1a6e3cd8c47fca502be91dbbf4110f79d1794072d4222c1d33a3e298d1bde49e1ed7d68d29c41f8e7c241db268640f8f9eea131fcf3c87160e995016831765e3d937d3d628bb3fcca7ba3e7d5db11704662ab828b0383b792c135a8ae6fae9a4eda9081dceaedbb73d4e9fdee6bf7c6c8e420ccbd3dab994b3d2e03b411fa33e98df62cf447f39c49470eddfe73ca651d6799dad52b661eb568f2438db915cd8a5891d371c1758a270e9102a201af765a39b07d6c6b95298c791b14b06c382a335493b530ece371980efd5086f45f8f2c1ab66372bb5acf0c6068fdc6f6614a82f301d7f7415152e4291d2cec2170fcf8e69181837399cf292346de5fe9bd9fa82b9edf31072a867bba218da47e5b61fe6b6752623b370a26263756d3319b6a79af966ac23ee6ce02d4b7d7580490e7d9547920b9948d6c6e9cc54cbf0132f8ae0fa9b093bee0fa4774bff056c833c1deefddb31e5958cf5ce0208d58e2c7a599b5f00829fe7cd411154d08d132cb1a4b6e34eda1f39cb9eccc0476c3488c17aada7ed77d1d2018aa9d20c62bed5ed459d2b2167451ea944a209680e62ecd99c7f869efa4dbf4cfc0026a1a8102e0660bc7521bd453a134e22ef85b044e2e9a0fc5e96ea588aa6fab4373bf067d2b53266b076c9da1e392e91622789f8a150046a64d50cf73bf9a14223cb3de789871fd28281e58595ffa096c0541d526f59d9a529bd77dfb96f44f5b86aeeacf2612c2f9685eedb931e7ff5789389359cca364bf0611b1097f51ec08db6c47607e466ec7fa68801eccf6073605632309751bb72cb74bff860c87a1272b13ceb05a106946993264f897d4f6ee390fc670202468c1bd69fb1afe5a8dedbce0ce68d964bc2ebd1e4b4cc04f0e503fdb24af7bac07c8d60789e3ec847b2da72afd034086054b0c7bcb468e81013f9f89f7a14b9b68d70f15a2284eb510ed076c5cf79f13fb737bf5f227938691f5aa198a309d2f419c492e06c10e1509cb2be7c7fc15f67f3092a7ab5022f6adda4627d101b114e01c97fcb4903e2cd9f7f321ec6aef3268e081227b359174020666ca6df34622c98ffb99dfcde487099b6ea842849c4fd64bcf07431224236e6a6f361b23f9596656ae994bd02bc9f5fca8102df3e9f9bd1b2278cd0cd98bb15505a162a9ab013a6f9ff1a9d170b11f2463f28493066d0248ae3bbe995636aae8068bfd8b50033b0726cb398f7e138e187bdd8e21d21f90cf141fa432252581c5438f1269d352145399805d311ac324c62797dfb497540180e98f9b174f5a21417a672b2ffbee95aefb9f39ac40d5f6c130f5512b632a8b458b2e6efaf7aa95317a604e239d39c9eb7ba4f46ffca59cdefe7b4b554ae89d7589ea22a37bfbe20d42ee305413bfd1fe080fc5cb69cd93cda172152fd50ae792330b4e9e24884c7078d94fba03b72bd1cf26fe6f0551766d21e8cd4d5a51e91caad0ec8102c6b501f673f7711fac14c0b97590ed81d066e12a76b446761fc56ec34b3a2e6314e01874479b43f430a00b6cf1ec8ca6edaeee09e41afb1ba829638df74b09cc7a1d256539187a30a0c09d202b36558da5923096f15b239d12fca46513856b767d6a18500f365ac437df96e1c6292483112d5d318bf090b0411a0fbb199e292721fa2a294aa155d04e81b9cce816a89ef1824d554818863f64352fe16097d08037c32fd1f6667250cbec48ae87486d558598511149c927885a67870ac7f0b4bf0af5dcba232bc70252b0737c13cd0e27f72889373ff271da190bdd79d355534d6b0c3357991360cb3de2f7a7d5339c53347553dcd4d1efcdfe2aa6c7c24e92f696472187ab186a7c9bee324bb092fb970b92e1e9be700c689fdda66864c6b2e42eabad09fb77655b573c5c6a77dc785a3d0cd43961b18a765c10212de7277e00a8f414b876d7601ee53825fc6dc53090d5158ccda8bdb5c907eb90a5633d5b78fc100267b214547d0469ab3db1e4b1346c7a13287e0220538253010faed202af356733ff94359c92ebb0b90165eb0fda47455d1daedea7c3d2972ff48b816cde49d988d853d368897b6c5cfa1aef16e40e4f3f0d5f173892e09b016853ba6d93d74adce123b39c799cad000767f80a78e4d01c739ad7c21c04d29a03182b1d694838a3f1636eee6070c8077db27f9628432c3d28
a8c4768a56826d2516d02b47e865f0b42af86e2897eaa64abb324a379375469cd20163531a11771d6d530df67918d4d0191283cc399ee5b52d302f7f78fc4c1f1e2b4c7042e96f68b9160001a6ad84ae891bb542a12ee0482c572cd0771616fc86c332b20701e3f6e9eec7f746d60c00c419dd62ddc8a5053551bf2cdfde3a781cf98b7442732df678df47c51584ef5bc3c1f750f38dc0f2fb3b79f05e2470181a8f5045bc4e3eb14c6eb789f86cab9636280f488370b75a2ae00f11717a707a3376717b861fd26f808566944e26a04e43b9dc7071777cc791773d7fc1357c68d1b464de94ab681aa6901f2c720ffa6715efa8119dbd9e8219c0c90eeb958eec0ae9ae52959fa967158300ceec72fd348e1b1542769e30f46e8426b0e03a77397d5280c88f1ff46d3a70f2552c6f2f69c89a04b5048d8d8beb7c7b041f035014c28aed9dd81718b90b807d9a8cab53e5337ff689523a91459c55693577fa8a597f78e9b46c47f5a296be4ec217989dc215f040cda03a4c9488e585d28a27c0ca70d1817d3f833009cbe835eed78ce736ef8154e2b19fdea4d3f03985d2e9bd500c4e37536015bba7197a3c22e572dde6ef7aae357de24bab767dfe8c9f853a5354ebf1163413161b9886590b2273e2228f88f476e5315d835431bad20e624f1e51640d5fc472862b9a942773e6f534109d6d46508f4daccd05b66b6771d02ca845bcb27ba9ccb12a04869f13e8746ee1c0727329bff19ac3572b670e1e2c7f5d4cc7adf5dd770783aedc17fefbbc1b9bed1acc1c7690275c6545e8d62548e3c9418ab608879e90a6b608e6c43dc3f4b382b0ea5d65df95ca39a2a9c1c7cad4fcaaaa094c0cc05fa816c79d529b67afb551eea0819e1291b1d4e1f5e1385e3570c378435ade1858bae896dbffeb5ef7d9ddf49ebb1c25395e3afb68aeea24ac06613cf297cc200797bbd3f2e0335697466cc290d882405ee7ff464ab338fb6f63aa222db63760d8359a3d744563ab85ff6c324ecfdfd9194b6c57b238ea5cebfe1b1afd36e91228d5629bf7cf56fdbd13e048b401ec8465826613858598560d4ea0edb2029ecdfef93cbd4064d31c6763d0540436eb6026dc0a1398108d58eea25832edf94991611a49bdbfb635be7bc31728381f6d5a3f99faec5bd8337725ec5051f860c8deaacded85715be61fb0f8b9e99c53c62620aa667f17d159ccaffe9d310380fc0e47aa4e39709633c4fe6217bfec80eb3db5cd2d3d787e8c13a12c07d2b730a09380cc21c636215168c7fd0ac72e47e86ca34336639bfb023b73b964ca9176d234df45dfb895f4593e0adf8557c645d15e66a4ed240969dbbdb017bf5576fd1f78e3f4a54d08624cd6bd8553ebefcdf083f4611a1f10fe6c2376f6d0edb08e5310e3e534a2458ac4d193da8070fdaed5c8b7fbc69c21c9b9144eca61179c1517a72c7ffca3356af8753dfdc94e2330e5ea7874763b49dfea0e70bc4ec16b394a017cbe62641fba83707b7d2a4ca8d32aa614b54b038fce1885ddcd272487fc92f386da2d216042ce6788c25a97a187ad75acb08459ece32b13c7f5b56b64451f775124300593ff45c8ad6f71105ac27296d19ed5b6488fc2b3224a2ac5b5caf8f2c063a9453b733aa8ae8e47d3ef2dc1c7efe612099d5b6a34310888c1d0b6e57812594c1d27d8bef6eef56a4f27f65568ff18e788795cbe2d3bb4898fa4d20531067d6c8a77a4d08a7302055fd61410b129b8d59b2f41c0814680fe9746cee6483ddcf7f51efc5d63fd54ea6fa9ccb48c6e1ee09c5030ae70d8f4177208c9cfdd5f72ceb87c5420505aba8cf2f01f56758f2cc547566e6568d8e71ff6045a90bd1f13b2eb80ad8b700edcef9d904b12644de816159c09176f6e9798278292e5527b775142d3e5cbd698bd15dc267481f776ecedb2a7f899dbb628af0d9a4055ac6ee86dc4bdc0009376984ab9a250a646545c38c8162058346a05a15f0aa454d3b11b5b956c7e4d3e77397262e42224388bbb50051d6eeb6094ffe04033b70c6b95dd6b8464847b1a2debf2241496dc062379d963ee81a46df58c276dcf24049d124d52581fad21bc90527476108f0fbafe33bae40804b772b77ad4bd9077a8907d4a7e0bbc80a9877e51482cdf2e3a898c744092f02137d01d621e7ce57c0093d24dd67690aa6fb8505d4cddb7da2735814c96d8215812d067191002ef0f97396a3479cea9a24423ed2cd30ec4f5de841e3932c3eaecd10166dfca9f2be45bf649818a5c9f61dbaa4aee0795d5585a9c4e4213720b8cf91aa0fad5b2f13d1bae76738f231d9f6ed6d87f77d33d7f2ace21e524b7a4a8cf7050c55ab6f3ec522205b7c058829990d4b295514a32344a6889fd1fea6385032b0b9f5dfa5f7616aa12b9ddb147ebd67322ef9682c8e12db91c1936586790974310e2575769340e2e2b537e6c63344a52dd669d823042bbd63be7039ae7e2604306ba182a400982d01ea878f55843ac6601fe4ec
3eb513f987690541c6727b4db90071219e2c5a74de2d892676bae1726c688b146723be400abbad650d34fd8aa6dd9edf1afbd0b2eb2edd70e7df2e285d26d79e617ffc0c0f140d0a80f69248ba3ebc319ddaa3e1c61152f51bf4750ce56514e736cdf70c9142a1ddb8c4ed1b49f61610f5ea436ba0e1dfa90c62cdd54d42cecc5d147b3aada3bbae691820746bef7767c2ecd3c4ae1d3d1a4652f18dd4d3f62eb010550a00b1bcd5c619908ff6b4abcd4dd1787c6e0da40f2d02f5b9be9217b846ee06e8d97006c84aae0c78c9cd2fc30c21d8d9ba28adef41968240edb5b9cf9a7107b638a641c948dbc69b4577fc5a72684156cf2977c70193520d87d791dfc4d64410e8b6fad966343a34165e23e827bf7bdd475e214b9a1e342e2c6822b27b0bfcccac6bc3c80e9101c0db2edf6816a77d8f64dfe12f5fdf40c68fde24ffd81be770a626cd2e012356599ffa72b0a5ec0903e0b704c8d29b9f1a3ddec17a3471734ca91a1a81f73f7186e3174cd33ef6d724316eaa0ae85fc072211ec3e19e985dab707a5f51ce8d8fc456ee52edcd469811ddf53096389d5a5f57291063a720c525a831497bb9136317ebc18df8ecf5cfb992295776cbd3d4452ee72df922d38b3d508f44d4c7fca49fb95bed36dc28a8ee4012723892418b79f0b14a53aafedebae81c641829ce8d874d335118fe5fa32801a4abc9f2e1e901b49a9c9f7dc70ed9f6a1182dff51c612b77db747c73640f53c86920f5823ab7c630"}, @NFTA_DATA_VERDICT={0x4}, @NFTA_DATA_VALUE={0x7f, 0x1, "4a392652c41dd9955bbfedf71e4528a0b53462f9744f6f26c3264e14dd5199fad75e381323150c9006038f95b2db622a24bf9b66984e6a165c9d5807ce608db3b4dd9482aaf006fa68ee6517a3230913ce8b1ddef6e2bb406ed46d1efded3b73fe5da774fbfeddedd6f5b48da5dfb3720ecb5dcfb3e8cb46dc952a"}, @NFTA_DATA_VALUE={0xd1, 0x1, "ad6149e036aae3e5b028b470b2665702e10587d00a8c4b6d3f2610c4407004bb8ff1e3b34f823e3663f2c2f3038514785d1f1e288283a5b575346920bb0eb40c2b5e75978191d308eb37cd82526aa6b2b4c526b336201394359b3d9be7a61d39371dfb65067ea70f6437ce79aa04530e51b84a4cf35fe25de1f1a65434eecad54ac653982d2b2624f0445364cca053e46e4702ad601781a5d622775b3adab55c1fe8ba7839f796aad99340ab5ac1b230f736b58c40125af447121b056072c66ec8000148d7d0e50e1a71cee9e2"}, @NFTA_DATA_VALUE={0x5f, 0x1, "04d617b6b934b92bd3dfa4f324eb98d0ccec6426b847caba1c10e238680ac6a5564bfc5eac4dff16f977bfc6a774655d76def22a066cd435e3a93c25d2be513e36f8daf1fb197bba5519d2d94e1b5036fc9d3d7cb8e0833dfb64ef"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10000000}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x3}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x2}]}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}]}, @NFTA_SET_ELEM_TIMEOUT={0xc, 0x4, 0x1, 0x0, 0xfffffffffffffffb}]}, {0x368, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x3b}, @NFTA_VERDICT_CODE, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x9d, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 
'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e239
65275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b82987898902815641eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947
250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, "a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 
'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0xfffffffffffffdfb}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x95cc}, 0x1, 0x0, 0x0, 0x10}, 0x800)
00:43:58 executing program 0:
bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0x9}, 0x90)
00:43:58 executing program 4:
r0 = semget$private(0x0, 0x3, 0x0)
semop(r0, &(0x7f0000000280)=[{0x0, 0x0, 0x1800}], 0x1) (async)
semctl$SETALL(r0, 0x0, 0x11, &(0x7f0000000000)=[0x1, 0x3, 0x8, 0x3ff, 0x7f]) (async)
r1 = semget(0x1, 0x5, 0x120)
semctl$SETALL(r1, 0x0, 0x11, &(0x7f0000000040)=[0x8d, 0x0, 0x5, 0x5, 0x4, 0x0, 0x800])
[ 2638.998839][ T4690] BUG: MAX_LOCKDEP_KEYS too low!
[ 2639.000276][ T4690] turning off the locking correctness validator.
[ 2639.002051][ T4690] CPU: 1 PID: 4690 Comm: syz-executor.1 Not tainted 6.8.0-rc7-syzkaller-g707081b61156 #0
[ 2639.004682][ T4690] Hardware name: Google Google Compute Engine/Google Compute Engine, BIOS Google 01/25/2024
[ 2639.007399][ T4690] Call trace:
[ 2639.008309][ T4690] dump_backtrace+0x1b8/0x1e4
[ 2639.009612][ T4690] show_stack+0x2c/0x3c
[ 2639.010745][ T4690] dump_stack_lvl+0xd0/0x124
[ 2639.011994][ T4690] dump_stack+0x1c/0x28
[ 2639.013113][ T4690] register_lock_class+0x568/0x6ac
[ 2639.014556][ T4690] __lock_acquire+0x184/0x763c
[ 2639.015805][ T4690] lock_acquire+0x23c/0x71c
[ 2639.016996][ T4690] __flush_workqueue+0x14c/0x11c4
[ 2639.018399][ T4690] drain_workqueue+0xb8/0x32c
[ 2639.019651][ T4690] destroy_workqueue+0xb8/0xdc4
[ 2639.021011][ T4690] nci_unregister_device+0xac/0x21c
[ 2639.022419][ T4690] virtual_ncidev_close+0x5c/0xa0
[ 2639.023755][ T4690] __fput+0x30c/0x738
[ 2639.024829][ T4690] ____fput+0x20/0x30
[ 2639.025928][ T4690] task_work_run+0x230/0x2e0
[ 2639.027143][ T4690] do_exit+0x618/0x1f64
[ 2639.028319][ T4690] do_group_exit+0x194/0x22c
[ 2639.029598][ T4690] pid_child_should_wake+0x0/0x1dc
[ 2639.030966][ T4690] invoke_syscall+0x98/0x2b8
[ 2639.032171][ T4690] el0_svc_common+0x130/0x23c
[ 2639.033446][ T4690] do_el0_svc+0x48/0x58
[ 2639.034562][ T4690] el0_svc+0x54/0x168
[ 2639.035666][ T4690] el0t_64_sync_handler+0x84/0xfc
[ 2639.036958][ T4690] el0t_64_sync+0x190/0x194
00:43:59 executing program 1:
r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0)
ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000))
00:43:59 executing program 2:
bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0xa}, 0x90)
00:43:59 executing program 0:
syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff)
openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x109)
00:43:59 executing program 4:
r0 = socket$nl_netfilter(0x10, 0x3, 0xc)
sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x83c8, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x839c, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 
0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, @NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8
ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbeb9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c732
06f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, 
[@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc0
6a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4ea4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e02
71771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, 
"fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34
ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd9
31765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a915e12e3718ce055ea8f3610df0855f85739d21cb78682aac041dc2ba9f43e7c4ab36c0cf1a1b4af872efba597976b70c4cdce1480e44a6c8f21575ab05e025003764768ae4b999e09ded47779ff3ef2daf552d5e29fb5b8a07662bc2aae19406c95f0b4277ae8c0dbc15afb423bbd0a7299ea411dfd32ec0811ee21c1031a7f27a36168ed619f32746d8a18f1ffdeec541f168402041eafaa96896677bfd4ceaaf0de8ef30fa46ed6221ed0db84891ca84789db14c55d065c98e0efb32057c3ffce85f30d93a92781aa0aced884207eefe6894922bae1027026a6b9fd0bd27cbce401fce80212687a64c28e16ca099c6c09a88ac75d7a1d4ec290f61609cdab99dfefb7b7b1c26bb9e58317a326a55304fc0362a69c51aba2279dfd3ee3bb1ea780386204991d2485490c6b21bfbdc7816c4f4cf9ac9a76223242e6e268a7352bbc39e5179aeaf38155d9a8365838196bd18fa77b91f170ae000ae0c5eefc08c5f0908c4310fb46e1c1c9d45aafbcf145b7f7b455506fd21cc104137de1366af54865059111855379c2975347a61772a70631ac1a931558c0118923cce7d0863d4688543c71c801adf435329267aee54d043b20263f38f632fb5369295592a4ae349c98e928482a6f1d60857510000d355689d2387f6f359661f9af3705cafa21879a4053cb0c40c72223efd97903ad6134288c17e089a1cd38643b7f9382bab0f5d0815f1655601fc3ae5bfaaeff401cf5779f7ee19b623a3d2eb0a8ce9745923ef5dfcaeeb3f94d4478e1baf958d3b4569b0d7e636d75ec41367078732bb6e984115cb5860be2a807ca869c5fbfd469b8cb9426da146231c37671ed85a2d9778ec501346bf7dc0fe420e9e92bd74332a81d5812d5cbcb98cdfd63c8992bf7b4a1c4f9561a0fe7011876abaae9520b38c36cf5e9eb8e1484d12d57b149f61af8fe7eeff7b4212f379488f322fbeabb8b801c7ae3f22a787810a06fd0abb80b7b4db8529b1ef014b78de695a4e660054e9875aa961c04504c5d3f21989f8f37a096c
f7733c7a1d393107afbaf15e21f09481b9c54627ab6e740989bafbe37ae9e4fc32b9bcc4f6fc3dc094fcff8a03b024ac0e546ac1cb1f8f0b3617c4262ef0ea2e8752328192d0c34fc0734d93d772c2b88188aabf1341ff40e65ed9d7615b7b653831f90dbd7dabedf4d4a720f97b0921ed8ba3fa34aa5390dd08069fb49b459607345f935de47b4db299892e8b7d57ae16b9d860799f3add15fcd943a8ed36b1a11e13ffa410495291f137650ad6403fece95e090222943ff5904521e8db762a87480d4df0f7a5e7953bfe96361fc78a5fa71e934df2f32754ed21ef4b0ac2c1c6f8bdf491c3a31e50b0f8dacfdc372f21db30ae11cd5cb916476db41555db39e06cc2b7f8e12d3a086d7a012364de242ea1a592061e52c4437e37fac1f65919c481e2c7418bb2c732af34e9384a2c96ff95ad4b2826b2745329552ad5e6faed3ba0d9ec68bcf698638818c5f9845987510f4826daf8f05d4330cffc36f0ec66bebe396fcc85643e3d7eee13ad759e0426b63a93299104eea4ee0dd3cc08d91136def0f9fb31e5394f87f8faa184f3be8ef54e15b700b766583e83fd87af6cb4abc5a517f54ee7c8f9c492677fee56e7898754ea5e6923b1591824f2d4a317550b444a1d33839e3da9ba8c426883682f6df4ea53710c2f0d066e962c2dae0b179da5679c534b3401c6e6ab9523758c9742f3547aad1ea37a5bd942b8561af886c5827b2e1f2254c774c5dd8d67a28baa987e24a30045f303b51c0ddcb0b24bcc23a9e3b7858067e2f6733ce3b27af820c803a4894fc395b8c53d6bb5a1ed26b2c0b085ae97a1051bcea18f567ebf6e55969d611ef02d90e9b2ed810c3f03f151fb8e8966600cffa017348ef1edca71a62c05e5e4de408fedf9396168fc9171d1bde27b3090675298eaa4791083f0b5e866a0a0bb198b3c1b5014ae27eb8bb27a11c076e0e67a5077d9a5d25a704967ed3d5685b859f48ecd26d29cf4c3fb538e18829512680957188501381e6eddc405e0ae3e7ec787147c46af5a41dead197064cd884738ec5ae1a78f9c21a9845d22d17606e13c8e294afea02a548259272db656f2f98af671346c923f4d8e965ae46f238c7e51b4624a58b9e7c569decc13f3a5cc159ba52c893fb4b343fc773caa74423c30aa79a8bcd0176388ee753bbe1886f7364df3e2e669bd183a91d81c478df05f967bf35b1b633b55a5ac59f94ec1595559001e6798562deea4701e281ff2a1209085b2d0431f4f2f4fe746e436280ecc27709008c29d2e229ab3292870666a2ea35742dca5c13fce4ff67edef0893f8d08780f24a0a369d9a7ab4513048a5ae651d8d7d793a7a9a2084b5db026ac76c6513ebd32cc4b14e64a6962ff7a455216bfa36cf7fb0692d4052648eefbe4d1a83ab0acc063043eaafbc5d16ca765b3e23f48ef53b21f2a8d24d1153909e7768dfb2f2356230f9007aa42d840b0b50a405163ada7ca4d908513f0a6ec6529ba41bf02f0f10c05e4fc4f48d10d3a08ddf93d22cab332f7d5e0e0f2078fda50dbdf907cdf7b6ea37cad86fe7a0d7eb16d00a554f06d3dc0ab766e558a5ee1ca091f7eb9d0bacfe040c9bd47380391cde5fc559ef4221cb563099e56bcea4bc9d558a610ed94c6b75c32e2cae066ac3acc981ceb0648cdff08e3c387d9dce921ec19c5445c5daa4750964c0e64d3758deb5e7d3508f8d68d247519ab9c960f9213c0465dd877efc8369a8fd3bd03dd2b915ba491cdaa2b05ad7de3dcdb33bd5dc083a726f604e2a07fa258f5dc31559c848cc1c5ecc81178b2659687784c7d67dc93c9ba98662d58f285fe6d3903765f6c2fe7efbef17df6ab84cba5966758334a1ca85bbac52dcb86aff4f1b5abcc1368380846166c37bc5f25dbfd614172079025312a7cab5ac294f789655029605056bd1c1ea529790eced02fdf25a7c3d5a4678a9bc525d9d000f80405af6fce05578770321c1a1289c705329b144bf63799fb98facb7d61d068a7051593675e8d0506031fb8a965de1ae42057069e7f333202e50fcdecb18b1843759ba42169da818394b89f78c1e69df642a607b394cee2445e9c9ca14e037bc9e637d3bc89b0b3a346f82d1c973ef78682d6b7f42bebf8b9cb34998f48592848300d054c9a288b63dae9039bc6226fce99d6d3ccba94298c04d1e98d6c7a525a10dc9a7cd6d74e2d6ca0922c0ed7fbfad05b1e5fafec25bd6da4228b3d91dcf5ee2da410acab4a09c01452b6ccbfa08bd63de0b08f9ebba2e8b5077a7175675aebad6909a725ead06e217e2fcb8450b8c89f9e5911d6dd51ed4075c938091678120820d65dcd15f6ca384390726a5514e9aaf11599884616896123291ff786174e04a94e53d430d61d63aefd7536b049990da56db83a6d9683e29a264de18ca1e29b648550ed33d757d67c0aad5d01879800fa86ec73faff3d65736f5901af5a3d7aaa99807acb37e353beb957b20d191fa3221f855e0959b4338df4d3f371d950623c642ef88
414a4d17d668f513deccf09e9ee6e864150a83747ade216b8bcdcfea9495aa6f3a7c20dc9c01a91f06b06ad73a90fea98948b451881f544728bce8e5ffb8b4148c94e3b4f4d91ea0fbfd2c960626d6c2ef130b61935574b61f981b26e354d132a7d68d57d18f224dbfee163f9e77184b9b38b0687ab88bd094e2601a214b87d05c441ac31f2bfe38ee21da9707809a7eabe5185fe705cf8194a059e55be2802c14f5da0cd5be67f1d6215af461b56e4fa62019a0e08c00a2e3aaeea3705bd0ef889b5280c14351cb1bc810842275af554be731dfb56daad56058c4870c98c43fb6b32597f86f92107e1f551c6a1d11a756332aed01f89bf59132fb30a81ac8b650c786cddf2d3eed792315601bc9761fb692371be2ff1a794b388742b53c9ef09d60848af08947f0dfb62e4b13e173ceb52435eaf18c3965ad7a3da69fc59c28ede8db961afc0eccaf46186828be783fa13fee8ca97a7ba323fccd716fb8bf8d009da3352df57c619fbcf557e29c976a02f4e5163ea93447febdb7ce0d0bfbfb225531d33968384b8daa945615201de3b736963376e849bd97c0fc2b91c2454b1aad3a43bca2ffc83ef87e54ee55ce4a4aa05641a3fd04dda090f7fb9ee4140b568837fc9da8d10c3f6d94c4348dc9b60e5c92af2441e4c94c4c4a1dea0f56fec1ff6b7ad6dc030c13fb07e5d0951d35984cc56829f7d803855e69fa0197501f192c6e0ac46563a08f321d520c3bb1921e8ef13932ad45f90ba2d95db52a9cb385240a07c6c8df22f2db382fa17c6c3ee463b7bb7080a43da4a750c7e32b8e274cea5021806cbd1b07a7ac6f9ee5b631b42111bae35a232c61a579eeab1836b979fe57c4f5ae9e5a60248b6c68481cc48794021e134f61082f6f9799ed8cfbc1e623d18117d7b84b085994e26efba57533d6b6c9e28daf51086690841ec9349a88aaeb9b1c958935bac46702526895245e260ecd9921591f604a1f51a9bf987610d7f837d0c63ac4cbb6c08662a72faf4406f0b2b5297975feaf87c61fcc6e260a291fa1db128ac2eb26d341ae848df2d3da4f4270966da7ac7dd063c860b1322b7259ab1691f9be24d052cbaf7fe7064f22cd5e96d518474b7dd58daded2b3a3d611fb27c946861b11c270bd3548fd89ab703bf65719e4f8bb76234b875ac12e21a0bb02b1291125d09b3bad4f4c2d46a77d7622a1c624e5df2d8547a679c16f94afef0e812479c4f771595a82127ed5bb11a3"}]}]}, {0x368, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPRESSIONS={0x244, 0xb, 0x0, 0x1, [{0x1b0, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x1a4, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1a0, 0x3, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x50, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x7e, 0x1, "235e6367bd498479de6b7375a62cd770fb805a2787e40c203c13284c065ba80cb60aba96a01deaba63db02623be3b2f15ca1e48543dbfdace1f44ec75e73a295705df1036740fb2e3cf243e060301fdd5f2ca1646bf94718b7deac5025ff802a36d8ed54a837ad02addf3f62789ec0d91b5a2e394a445575cf9d"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x2f, 0x1, "ca3e41028053f91270c64bdd2118844d01f101e3975158d285321716f22568351340f1e7a039e2ea41373e"}, @NFTA_DATA_VALUE={0x4d, 0x1, "0987db70a1f0e31b67b0abef0f73f02b3fc679275f29d30224d213582c1a7742d2afbff7e59060114a971d88124ab1dc7910415d4617d2be02df470702ce683bb2cd56aa2bcdbed04a"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}]}]}}}, {0x14, 0x1, 0x0, 0x1, @immediate={{0xe}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x18, 0x1, 0x0, 0x1, @nat={{0x8}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_NAT_REG_PROTO_MIN={0x8}]}}}, {0x2c, 0x1, 0x0, 0x1, @numgen={{0xb}, @val={0x1c, 0x2, 0x0, 0x1, 
[@NFTA_NG_OFFSET={0x8}, @NFTA_NG_TYPE={0x8}, @NFTA_NG_MODULUS={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup={{0x8}, @void}}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x114, 0xb, 0x0, 0x1, [{0xc, 0x1, 0x0, 0x1, @fwd={{0x8}, @void}}, {0x1c, 0x1, 0x0, 0x1, @socket={{0xb}, @val={0xc, 0x2, 0x0, 0x1, [@NFTA_SOCKET_LEVEL={0x8}]}}}, {0x10, 0x1, 0x0, 0x1, @counter={{0xc}, @void}}, {0xb0, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0xa0, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SREG_DATA={0x8}, @NFTA_DYNSET_EXPR={0x4c, 0x7, 0x0, 0x1, {{0xb}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz2\x00'}, @NFTA_LOOKUP_DREG={0x8}, @NFTA_LOOKUP_FLAGS={0x8}, @NFTA_LOOKUP_SET_ID={0x8}, @NFTA_LOOKUP_SET={0x9, 0x1, 'syz1\x00'}]}}}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_OP={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_DYNSET_SREG_KEY={0x8}, @NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_DATA={0x8}]}}}, {0x28, 0x1, 0x0, 0x1, @fwd={{0x8}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_SREG_ADDR={0x8}, @NFTA_FWD_NFPROTO={0x8}]}}}]}]}, {0x1698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY_END={0x244, 0xa, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x9a, 0x1, "d32e1c89c0def208594768a19e9c899fc170950cbbdc17cba66f5718a8f25d1352569298c414baa9ecda7837cb32f0e4b1b14c7d48b5e34c1c508a5ac0f40093fa47d5009476b0537c898ab92218bc6e84efb6868fbfa7461ed88e03ba2d80d65e4efe9497e9af63166ba4b5d427d5b6c6c2f827e4b671292ecc854f0300702d8448b15b558a1ace4f73d505a70ae456c5bb3ad679d3"}, @NFTA_DATA_VALUE={0xa8, 0x1, "4ce31e5a27e317e92aec8aa270d1632b1ffa2f79ebb39decd9a63f32659f62979e1c69f04e24174e6d8266eca8afff156ab19d2ec30a4111d2a90e974b51eb9b91214d17223ee872338dbbe20da9420c13957b83b57aaa686650cbaa5afe5c1b6de1c04094b3276b638c25bbc9db339f5b2aac538fc069355c662f24391ea5b082c86875b423265df040a69198d6542b65d2a86f2220ca48152327c3c7fcd66b02d838af"}, @NFTA_DATA_VALUE={0x71, 0x1, "3905763298cc70c2caea99741467915e7407394e1754bd9496e52722ca5527e6958bd720eadf0be7d1f93d2cd4d14cc08290cadd708cc896c002b5411aac27998069e0035a623aa4d64fcd719c372409cdc16fc75daf5b22e77d8b024172a24f4f1f6a18292c793233f968903b"}, @NFTA_DATA_VALUE={0x77, 0x1, "2381b8ed7d94d7facfc5a49e0bee7b56a358cbe9428e66f0ce009127de8bc2eff05429b2ba7489005fd146f2e7ae168920535a630335cd0cfbbb4fb8b129107f3560030f2ffa7f81ad6c8de453e324194624dff8593061049295ad95890157dc4952ca5b6d5ea9255f9af8f566b98103482265"}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0x2b, 0x6, 0x1, 0x0, "385bed223b75db83e98d84427c66ad8767dd9dacb5cb0f9083b6954af27e7b10a527599d19b6fd"}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_KEY={0x27c, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VERDICT={0x38, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VALUE={0x1c, 0x1, "906b6d4289d0314a956df8f91ce35af90992476724d323ce"}, @NFTA_DATA_VALUE={0x59, 0x1, "e0c3335a9af3f7aa5a10341d48dbf25bcb5a6f12afa229561b42addb35c5f5fc3396103a453b354b192aa7deec39c59f3d815d5cb023e4e63c425399e1e26080fe3fa9e6321f9a4533238548c37bdec2fc624a7f85"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0xb7, 
0x1, "67706ca103fe05d3eee4b487bd1ad64b6697e8a22bfa27901d0e54bcb4dfa29de6dd41d4b538df41d8821b68281d3260dd36731ca9725f25261ed73b517ae85d82eb5a3c74a8a0ebf452737f59bce87c7c1a1fc6d9d14a124f599f657ad71d431918dabc0cca316a41ae1714f7679a49e57722d09b105ccf5237cfdc2025d144ac0a038e22dc9698624e3e155ffdb88e3798b0b6c6d56d8ee2d827cb2bf346bf1b397194a65f9497a9a4a0eca0c5e89bb49f1e"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xbe, 0x1, "be659a5b051177f0839c8185ad6353501a8725264847b7bd8a26d107b54f9acc3c7a5cf0211e84b73460500973c294b8da522665b87a75998a22c4f58accfd1d004ea878f1794599fba047f77ae1c0f6f6abe90e59c17533663a0d31918a37dfc0ac4b537792fb529d13931e9d0268404f479818c916f5b1a31d655976fbd865e503db4a62ddf635be3e8742824f87f6f02b3889183f8548b75375bebdcaef6802d09bc775aa90c54e431684b41e79fe14c9d7ace46259671179"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_DATA={0x1170, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x42, 0x1, "69b08781dec556df2231d6420b13f9bf38db03c22e11c081d2179a49df95b87b1130ecb197ada31e49853fe7b8de771222759d78de77dcc94a8e96f42b8f"}, @NFTA_DATA_VALUE={0x1004, 0x1, "d8b7d1c78dfb2e07c1eac686c9151a3b4ab6edf192ed651e5cbd701f5d2d9d677fea4fa7e61a019ece0428b4ddbf18103ea5f7b9c873c7072a06356e72e1bcaf07b8631ce4bffbca99cec2e17f9009b2a0a41934b5320751ca5aa83447daf53af9706072b8a120911f1a33313e12e303fa9ccc92a44ac40b2af9219e2e711e1bc1739cbec42f97db1f8bfc9798423034cf90dd83a3c0b21420f567db2468e3925d2f5e702b432f7f0b86c3f85ac1015b5f617de89d3a874785b74e9ab58d09b6eeeaf80aaa0053ccaf6563299d666915b977760da6f89e6425e43bc5eb2b22c2de6783542731ac32f28336a41cd618886636cbe8fac39a608af60c77b4ccb221c3b8c63cb34eab3988a0e07754bd24674e85906c30d80d251c272998f743db0728a8ba3ad2a2b52281f70b6eb274d153604323a0c985f6edba15cd2d3ea6fb6696ecadd97487d961623018ad5ff67e3d34b51cb76cff4721f1e1157a9e0ab7dab955f4b400f1ac6341eb782deb7b3b45446c33f42a92f346f6d895a8cac835a7f88568cab0a5996c831c726738291166123b545db4bbda80e9794c72caff7c3e65601a1563646dfe689788622dc17b76b1fb32c7b17b8ac797d49cb8a9fddf67b4d55c554e833d6292e89445d074cc91f8703a80009a2b8c55c8a4e97155b9dfd476416675a9c8392c9a8fcb8b396f39469286fc523bfb84ded7e6900c2e5002e27e39f51272f61adf478b73af270e92173e40e9f9e79cc25d9cdf4927dc3e032da464b29d071b193916e76274fb060a19e9298d526ceb94b839731d9e9e25680003626d26096caecc3228f0a2db1edadd73b5f8670a7c04f1b969c0358abe56f5067daab39a06080cb768de8fb8450191a74787a2660f0921b20bd65b89e2bb309413190e2c31c14b1e8c7268e3d8744cf03f313d78acc473b98ae548f4d8191e7657391cb085744eac5acb02d3b11de27f94cf486ab5375911cccf3c23fc74e7116dea68d10caae2ca05dbcb8d00da03816801a0128f685bb486c3b6ee8f8e04952fffaf97764a1752603d2ca289896fb1fae52728bde92f9375b3346ecccc68a503b14082bb060aa2cf40510d89af636066d62f99b97cd22ee14101b9ddfc7d0f48eaea4cb34b27c1c57cb090f2f77aa4b20baa3b6c40fe75dcd28e96bde5719c3e51bfff9fde17efde8ff4b7268c9fcceb5c30c169f6a4d8a9ddbc54fde292a3af6947134404a5d130be7d5af10cdb8572ae82c0c0232f365060226bbf6ea94ce11929578991002d20bd4a2cdd42fe5335dd9e0519ebd7184bca1964496f60fac9b3b816da0ef48c66e9dbc6320dce3b2ec33863d276566426f9d5b9ae6b73d73395c1f5cef723d23eb84a786941e01a06987d5b1703827872d0a30134f95f6b4b18a73a7f1284ce1bbeedc854a1c958f3e4ba8a185fc464c52a3ca9e1efea89c75d4994272681b7f6a375afe47dc79a6cb42dcb4a52dba75b793d30753e84c0b5b45a6cfb4b646773b1b5ce4ad66db790498515d80914ccaa37d26a6fc66ebc0a27b4737598da8d41670cfdcf0477f799918dbb837eb5fe7feabd44b9090ad8c1fece27dc493d304c5904cbe71420d38da850e3
d5a2a0841b589abc5d1fb1403810b59ddafa3a1698c45b2975d858d3d9338615eb41ca744abf5c7094de4661c05be60bb3e51eff08eea6383e26e316f331be341bcbc85e4c9c749a1d27134be5e15e3f6cdf2ebe43b5b11bbb3f12ba5ed5fbd1fff0aa2ea899422c3399d6f43415c4ae1d62210eeadbb37c7581e99e9c889153fef3cb0eb463b3c6e1731821910f9540fe660b1f2fc7feedc6c46f7e5910c8fecb80374d8633c77123cb8e3585d58539762d1de08227063e0a78bfedc256c7e9d457ffeb2a299f445c4e6b9cb63fe0fb60f05c740ad7da5cf3df7868a13150725da42c504075d5d8ca38aa7aa9d91348c28e21853fd1f846f31d6971aff09888a3ae771e714d12e1c7642369bd77f5042ffd636501cbe88cb4d8c3634be34b21ded86f16e9265b190ee6759624993225f30331a9033e7783fc34ab31bdf11c500158bdc7dcc542a31ad1ca92803a440338c09df91ccaf665a6f9ad86fa67a11f629b167e1e6620718a658bf3d4882d8462ff1dcbecf2c51f6f32ed0bd1d21add9af47b6dd9043ffdecd67cb352559f6b62f27f32f8902ae5d3afae46e5ca7367b754cb31180581ac58399ba2141a0697f6ee527506d0ab30b304e695cd02309c3cf9a590b126293b376ae40b06699047fc59251baa17f1f3e626a75e8071c9f85b2454219f4680bb4bdd683fc6ebc57798df5b22f4b624b95691d3172fd6a55e9a9d357dfe7c026d8478de1f260a5d94d40815ebd0261c5cb7341cb4b2438cce0e43ca0e9dd33658522295ce8b0057c1bab966b17df5232b1e86dd0f27b155b6a3a801b193c082308d070d6c308c18f8993f880a21e0902ab73ba4573bd01d1aa5d945b951b78754855739e596af057c199cb2ae43798f9549e23965275b9d18c62d74688a49635afeb593ae66b8ddcec3b8f3845c7f3c7d23cfed6fe5118aba362a51ccce6c2a480de7fb93274409e047e5120ee7c9213dec6bf4f9780fec0587a01dfa0f48fddeca89f91aaa92b14eff5e088f6b65a39ebc2109819397bd8e3ac867d97f5dded366ff47396e15b1c75f664484e99d34467fba5cc1508fea90f7d02de155bb284885e5c759ecb40f1b8bb77eb8a405bb0de54465a076edf432ebd4f440156f6b78ab23d1d3f3933eedbe6ab7d4e577302938ca8b37ffdb72b465e609a72652ceed64c80086e93bfa1744c42d41828b07764ef678bc5dc814bca7115efa5833c2588edba562cf31d44d17093f46843a443b2d49bb85c8e2d1e35489b29bd59e939d7cbf795eff5a80911cf7d677a701cc5f8fa9e7519f8308df278180d0499f23564eee626daa2a14c37bd4236d97c5d2d041ba6ef5fbc9636b7b5d643174073aaca2785ec882a6944f05a46b0aedd5676d11b795cbedc56f4c427bcce194aa628f7539d55bd633021f81bafa6069aa43b28315452d03e27d681b2b985c8a6b4a8c801a5d70dc5626e07f4a4533b201ebdb4d6c7a945b1d514c1430472fdecd41f377ca4f6b0a0635f08b10ef9e580663529e52815244bd8c9a12138c561a66337b06a00a5122c4520a93c109fd5d4b0c247f37e0305d8e9994b47a412cbe84f88cee2c169fc7bb5de8c5bdfc2e2174907a8daa8f58a55cffe290e217dc1aba92975ec217468520a38b9cbb3a3091f9cab1235acea00f5410243a1ea86a382a3259679c1617deb87ca21eb08ba1b1dac9da91f3469d3ec845b99cc08d74251e1958287ba004a500467c082e0ca8aa049d848edcdd862420be5a7d790df55350306622a565fc83cb93330436ff2861d028865f63a8bb6c60ee3949d286c9cd93f250997fa86d710dc2c54e2e3a7f25aba837f87414d680c95bad809c45557bb0e0b565468725260d6b51ab1cc0b394816cfe15804be303c79570b8f3ea6efb36daca51e68bf068f685ec01569775f44eadb23663625ae10c554adac0a35fb88593920a4791cce46ca9244c3232e6327992ded153cd8c8cbbed48bdb9fe119e8ced3b9d5f7d83ebe449be4d48b6fd544ae4cb61eaf91621f8d50972a66261825fe72e7a24e0b7dc096b55e9675f2bf540a08150e05973e0f37424b37caae3d4733357b6365515ce12155cc2dc0712ae695a46a1e30169e677e49b06bbcae1473ea11944083adce1a5b1da5254f0b4f720b0bf0f09f7047f8caabab4850749c784267774c1d153e58023690e6d401e8ea064330b0774a337d4793e23b25faf8760574baf563d977c14196052eba6322ae31df16cfa1174ea9659aecc77fe545df711e237eadc211706e6172ab35453183fe85ddf97148fd2e55ea01899b221e03fdb2dcd94d33fb107edd49420f45661a461e1fe84dcecea36b9be54946211b4736d09a820bfd793c1ab7959ac57561801df8c8e1fb1c140787e5cd1d99fd16487c6eb6c6c3c1cdb730268ca7ca4bfad8f8466665a543e45759e20d02b941db0edcc8ead677f202e3ddc39e1b0abcb5865eec101458ffa71cef5a53edab821cd4100b809b8298789890281564
1eb5bda11efc57312a94a30f2c9ebf9fbbe29de2a7a185c2521d5bcfc2ec96a3f26b824905fb86aba5e0f1b2a3ac02a9be2663ca8f88caeac0c1aa47d51816c2f5a0ab324e5e6278cdd61b73b6f96da5fe1f9835a494097d2f6953f7dc95ee5cba05f24867d7046e0a64882053f30eab17c21e70a9082dc8704859c95abadcf5219d8cd8976d3a1e26a54090db46d71d9756f06415ac62786a903fa568764db1447f7922172828af304c39e2932646c431a538df2cb30721a0f19b8c5657fb2694f941f113afc4ecec71d2931bddd296edb5c4441218eaef72739aaef99eb520312c69642cc9de27bf40cdb1c8599f2dfc2495845a08b207fc87adebb8c43eef5d80d6cb10c62a303cf4a7f5e937527b8a26ebac13f6ae954ca85539c47b3535827b3697fabcaf8b59801c83120e2451523fb2c57e815efbfd02d223187fc9c31ad90d097ef996faf15f2918919583d3610a248553e29ef6169be066916c2eacb14d91a341a584a8ed77621ea387de3ed1468e44bfdf13117348bc9a56e4e9d2bcf0bc14b3e9b8a512b3c5a15d3ce602e01a13924d4252adf79e769226e0659170ca0522433c6c139fa9499530554dcd968a5eea303a96d66151bc985415c17c19633927650abc787aae2ff17667d18658a30d3fb3e0891220e7924160552fe1ec43c01a62220128ae4116f64aeee7fb514c63a034c74bea25287ce8696df454a36db4b37d12faad14109f35c175e1c8dc63c95329c78cfdf1faabe82c75df1cc774ffe3e28a6c43e920ffbf2bc18dfe775d7a1360d593178d5c73fa46b0193f1e17b601a18e95b7eb941fb18d7c6d755537ad2d14ff95299379f7c940a6f992892ebb3381ab13b98ab9ab0d0540fbea8f4f203944c767d6c63947250b3a3bd0aa638f9440368ac90264fb6036698e97f910cb4bad7ed48966bd0baac9740fdbb38b5c51e0bfcb208413022d8b948663a1190fb9c65ca99b8ce3a1163b1ea1f7d388d2b6c13498a13ae972009b52c0e94e27d3e22f82c32cc0c4677aac435cf323e58b949cfe0eafb761786d983772b748dca7e5b6d49a6bc8dde68f7c88903c6ada6e8ac86d667c365f0b100eb0222609dc3ce1768444abaeaf9cf58dbcac76b21e64dcf2fea4797fc48dc7c5ff1cbcd29bc9020ccd69eec773dff95dd0f1bf1ae0c196dd4f0d42584b0e4c43d5ee1563b3ac001a6396924f998fd885bc5bbaf62bf2cdf34f3fc21e4b6fe0f0a32273b974cacf3a20405061dff36f884a867e55c5d72af5e452b166d2cc59333f9b9e006d290e159f3277cbd49813d0002ca971062a3a354479c5e89fd5ad304d4770e5d810a0348fa5d1cc6489a389ba3137a132bc415f4af58702ef0244ae195d734e51dd8295d4561be9f783d8f575357dda0a4c97cbe827d5e08675c4a782464d1d3777c8e6ffabf63e2df3af4e0dc8e3714e0efb1222e82a15027326d4ba5626c6c9b4aec56f33c7cde2e76f35d03c4b99330391ec22fcbf49cea1035d5f430529cf4e18cd7c00d5610205ee1ce5e0f900a2a62b240df6082fee7a1ae835dc52fb8e90f66e1929227ad52377aa383a8173186729231500aaa2eecf415e7775382ecf8f2c85803d21bcca238c66dfcfe9e8b958bd5996743981618f2f3106390dcbcfd937f0990bd01"}, @NFTA_DATA_VALUE={0xcb, 0x1, "ea9a5c67eda9cbb571e1beb9615804d7571d11e333f39da6e1d4fd059264429dfd09f910b925b5849209b67718ee47594c076d17bf118b721bbc05a141d7ee9f9f7d6dde61db910ad5880e557ba32840d2e9ea545ea0690619e92fc313f48b76df9c869996888a2202019405b8f33c46179fc70f7c547cbbe651db628ae3576b229bf998085e36e107eb14ff372b1f6b97a636d252314af5fcd83b1394d46ba5276eaa553eb16cc2bc836e96f9e258f9d29d5a8985d0fce79cee6166df6a37862472d6a196963c"}, @NFTA_DATA_VALUE={0x57, 0x1, "227dc6e346daff9ddad67b0bf5a3fd45d0dfafe3746839c821b3a09ace64a81f98b839ca275b1cb557ab3753bbd515aed0022b553ee9d66a6770ddb641b1c3710e8e761d25d51c6893e234d96b79bb42bc1dc0"}]}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x3c, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x20, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @meta={{0x9}, @void}}, {0xc, 0x1, 0x0, 0x1, @osf={{0x8}, @void}}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}, @NFTA_SET_ELEM_EXPIRATION={0xc}]}, {0x230, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_KEY_END={0x6c, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x68, 0x1, 
"a2e6899618c802843c1e919dde83e83ec9492e3e539750c12aca8c257103d52c154200ad6c782f9f07b8d1c165a397a0a664518e8f7e7748374bff4f11b4c853053f975e0df9cf76fbe627e4aeab2c269f63a88ad9bee731e67d4a3aa8fe7f89ed1593d3"}]}, @NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x1a8, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x54, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x44, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x69, 0x1, "da335fba721bbde8b649c9466854067aaaba74889ec0ec7eacd5a8fb32629d446441e9a4e3e7c865245ed88ecbded3fa1bb882e0fbb8513931afbe8f799650fb1dc3e70cbcdb757d684e49228515efe382059d3fb0d818e5aaad7bf1babb0587183e875b7b"}]}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz0\x00'}]}, {0x174, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_TIMEOUT={0xc}, @NFTA_SET_ELEM_KEY={0x4}, @NFTA_SET_ELEM_FLAGS={0x8}, @NFTA_SET_ELEM_DATA={0x158, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x4f, 0x1, "ab13008ef00657dbb8ee2789e6672e228f655fbe1586438f5b62245134e10dedc7fae2b1ffb435eb8cbd0f08a8f3686da87bfceaf8a8428abdfe0aa6d57b0bf60bf13e8f5b9aaaa735adc0"}, @NFTA_DATA_VERDICT={0x30, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}]}, @NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN_ID={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0x88, 0x1, "22ac169832fe5becf47d6ba92c288597c33638b804c0da984ff5721bd27ab2b076dbc87551438737112250d67033906acb9f7aa9f8ab08648d6e0e5d5cd3b2c8e9546abd5ac2cda88c5ea710b6789e530382d0b725779a4e6cbd2ea4511fe3b7f8282242ad1c4cf48544a0eff94e608d90f5c33200dbc1e632b76ed7560721d6de35f8b3"}]}]}]}, @NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz0\x00'}]}, 0x83c8}, 0x1, 0x0, 0x0, 0x10}, 0x800) 00:43:59 executing program 2: r0 = socket$nl_generic(0x10, 0x3, 0x10) r1 = syz_genetlink_get_family_id$batadv(0x0, r0) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x0) sendmsg$BATADV_CMD_GET_BLA_CLAIM(r0, &(0x7f00000000c0)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x8000}, 0xc, &(0x7f0000000080)={&(0x7f0000000040)={0x24, r1, 0x1, 0x70bd2a, 0x25dfdbfc, {}, [@BATADV_ATTR_MULTICAST_FANOUT={0x8, 0x3c, 0x2}, @BATADV_ATTR_DISTRIBUTED_ARP_TABLE_ENABLED={0x5, 0x2f, 
0x1}]}, 0x24}}, 0x40040) 00:43:59 executing program 0: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0xa}, 0x90) 00:43:59 executing program 0: syz_genetlink_get_family_id$batadv(0x0, 0xffffffffffffffff) openat$nci(0xffffffffffffff9c, &(0x7f0000000e00), 0x2, 0x109) 00:43:59 executing program 4: bpf$BPF_PROG_RAW_TRACEPOINT_LOAD(0x5, &(0x7f00000002c0)={0x0, 0x2, &(0x7f0000000080)=@raw=[@cb_func], 0x0, 0xa}, 0x90) 00:43:59 executing program 4: r0 = socket$nl_netfilter(0x10, 0x3, 0xc) sendmsg$NFT_MSG_GETSETELEM(r0, &(0x7f00000001c0)={&(0x7f0000000100), 0xc, &(0x7f0000000180)={&(0x7f0000009800)={0x83c8, 0xd, 0xa, 0x401, 0x0, 0x0, {}, [@NFTA_SET_ELEM_LIST_TABLE={0x9, 0x1, 'syz1\x00'}, @NFTA_SET_ELEM_LIST_ELEMENTS={0x839c, 0x3, 0x0, 0x1, [{0x4}, {0x4dc4, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_DATA={0x13a4, 0x2, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x5c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x200}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x10001}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}]}, @NFTA_DATA_VALUE={0x21, 0x1, "2933ce82c3511badfa46352b1f19b1d98909071acbc9288db101312c45"}, @NFTA_DATA_VERDICT={0x10, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x35d}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}]}, @NFTA_DATA_VALUE={0x6c, 0x1, "f0a96181a9c38c72f7aa4eab49c30e87114e054f27addc1a7595644802afe56dbf8625ca87842e3747058dcb579498c9ca4a55a2f62019f05a3601352d44724d0f75589b0c2fb4a3d4cd93eff62ac8a9101672b1db2d67831d9510869e17a2b1cc5b9613da191875"}, @NFTA_DATA_VALUE={0x90, 0x1, "2ae450cb419caa32aec3559b9e1fa8d32262d701925bbe209e355c2f2f106c94ff2c75435d0c9953722453c68a33ef517f2698a6d177bd7e0c80c288c2d807681d186f1142dc22cc4fc21ec2b1963d991707084220086a3cec9b7a1d32a9207abe8a4b9782dd5651f2c9e22bd40ece02045ada357ce1832926558c44e1c4a993e3385212a5dd0d50e4731bcc"}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VALUE={0xf4, 0x1, "59ff4cbfcc33351667523bfb8b0e504afb3f05cc8a01b153b866032e0bf516094a04b070c1aa0a6a55c6c7f3f25efb8cec128a91bebc36fb61edf7712f670720c11e808dd6cd65563035102922988d034f926cb3843830dc362f23e49767617d7699a5c96c74829a36f605e41e06a5a1d926e1870ecd027a4e2ed0dc81bad90c35246f7656ddf9c3ba4c153f3954e9b6e4b08c1913e1299e05b18f4877a1c273da15763090e9e40dd714a8b36deaf2adea4b1784d716095607d9b1d99a5cb15b193e0def434367af75c0cd368589c9799b5238327d06ea13f254e32671440f7333d30e48d39c0d1bb2cccf51bdfa74ae"}, @NFTA_DATA_VALUE={0xd5, 0x1, "f3289ac4ec2d3278a68dbebc2604b67fbfdb90645f382e2b1d2ed6fc9e4211158e0f095d459160361f56daf9f9d3efd888902b73eab47581d51275e8658cfbf61ce1d448c453544b40f10b22bb1e5891c141097e172a40b22a583c6f19c6c302ed7f0809bf19313d64572cdc436de5b07d2834381d28a417003248e81cf7b2272043a46cabb5e278cdc5aef3efa8dcf474c20d7bc499b8a6c81e50dcb17401bbe638cd33b406e89ad5937a702383c3628c73621339a0977cf0ea21b730ebc6f532e6bdbc82673a93b4793218835f8a65e2"}, 
@NFTA_DATA_VALUE={0x1004, 0x1, "effea77c322e6725457995f4e80dddabde25d6a950f50d0011f6e13ebafef33de3f5eba19b562cdde422ac32557e9cac530d4ec7f87a3c77a23d7467c4a93cf8731451c06d7f7c9f559f68aeb2c7373ac187db4e7431fb724026fba3088abad5be43b8aa4c1d4c9a790c78e2ab9a835b46f8929181682715bece6de5c392a920790a8ea0f5e8104b19a6d87768dbb7fd95a38458882fe5faf81854d143054b2882875f9731a9ce6f9604368e3767281dba214b36f859dda2bc9175096c5f9e13dd05b9d83985ae6512f0d5b004ecf301c0c68a3dd06a87416712166ede5dd79e769e2425e623a7bcf33ec3a385f8d107f8fa6683c6809c5f489068dcc2a1a06ee0348ea013a0912d8ddf308eba7b2436fdfc34cbeba985451e0834e10927463195950986366908ee29b0fb93cd0375950057d00117cbe715c1f1fac1db08ec869f01f79adf7d515381f8444d189e4399aacf2e8fcd5cc3c0070415287ce94cc1033b222d87a7af025c54184f872ffbfee432768cb895f1e9a895be45b1fc577dcff5d89707265af808e66931d1f1c521c707c94ac6fa9e900bf4373cd6a109e21ac47a80f7ca40fb880721b216d5668247d96258900498a7f8262fa978ecdca9f3cc96339b8555d3be9e66becec0b4e953cf11dc9458dd402654f954c82f2da62c1803f956972d8a2fe61b202df78f1402ceca01695308583a1b72fbc0ae9ce8082a424fa0be33c4808c34d5887784193ba6e38290bc7d86a24fecac535434cbff1b4c117d5fab412b57d567950f10e107290f79a9b92d6a7a9d120fcc6d9af285ea7888310392dc47a8aeb74343437f3a1833101d3e07b735e1e3e4f094c23298524b60be358108399cf8fd3659952ae6d9e4732394a8c0f83e92d34d7b6ddef4d0d311e3e572d50b54c814c34da8aa8b1ebcd112449dfd367c4d2eda0ee9393619c836c94399f43074a3ea030edcca69f613efb2f23d812184aa2a3dd1aa24c50083ae76dd9f12b37008da44fc4723a2855d179bc29a13a22c832a5177e02ef4e16f07c4a4a3f7e7fecb622f7f2cc34ab1718b283c9125a283b85f267820119c6265fcaa38c39f5c71aaaa62e9cb570953613ba87d60f055f696a135268b621ef57e1c073e2290767800f1984656f49289591d7c05a1125342c5b4c46dae867f2b4ef59c507621a93d4098089d7593edeb6e3e08ce63db6cb0847058e4b123b4c9b947b0ae6c949666709d6015cbbf2fcd1d360a437aa0106d7e02247d8b2bc0ef5f1add2b83d170f4d160d6c3106d35c4d1ebddcafa34c70615a31db0d347ab82b89b4e63981ef67aca66cc1da7930864a995928d03a0f8459c1fd350d0df058c239b9761f599d539d3325a38cf8f563c8c3c4367c647a772b5b9d69b7917187b1d653df6bba2f90c5d551b1698a835bd2c36016335afeb1e3ba20439614787c432685669371a2dc436cf348cfa67eafebaf50142dfbbc58c1b23973f93be239f86fcc0fe511f8ebbb56461e90fde312fd11274e57f49e46fccc820c5ed820d9583afb4aa8fd0f96f62f2ab5481b1544a2cb849a5a9b03c1b390147a5857dbe303b0efc3c92e367a1860ce8ddfdecc9974b49042d340ee38312c3f294e0cbdb7bde9b96b584a6f7e6d41411345e9254cf6908d40a96fd6f83049ecd70b59557ae4d03ee6fa642dc75be58e660300e60df93d6da551bcd00d95fbafa8e5040e553c567cb14953c3df142d379697bb7b8eaff462f3f22edd8680c21adc717c45f2a4d6f21b63dd29b6f8779077f450436d44c2faadbf3f11f473a38630450cec1fdf3f6799a5765238f083eb1e004cb3d9bb3e45b93ea589ed7ef73a0deb1c0676ead31d3a054932bce665f1fdf62364481b722bf5a11f34b12e507db421803c0a03696b718dc6648afd2fc484d8938337e21aceb46d03254b1ce896bcb651e13ba47d7f00d440521e7c69cea12fc8fd01f6be48b5d2ce8dafe42bb33733f1ce248b3538826beab86e474ab0a4889725434985aa037c7d8e8bc9a9981c0bbdebda3c506594b2b3f3baf1c0c3b4204a281c5ec56d1514b12f361e5173d9783e4d71a26438447e9b1347177bfcfc64e7f0d038acfab45b4a5a9bcf3675004943ba71805dd72b21093e2aac8e3e5bd344d8931152b3a1ceb04d9b112a429dbc7f187ee709a9342faea6ce9ad07560b9858336a9385837c3b50aac0aedc8f81bf0a5761364a45502dbf9272094aac706d08fcf3aaf19899e7fb06e0873dfa98f67ab162e7f5187bfd0b00a968011e73ea4dc0edd638faecc7c8848351d635aa9510fc65e777152361fdf68a8002156633501cceaf4819d0c80a913c0a5ddc7dd11f2f561c2ce2dc7dbba48f62a4a54251704cb35a2738076fa21b53ac9571f8cb9608d8e44d052f08c1474baf46ed03711029ea5b0eab02fb685b75b063b2c1c0c2d2bcd3e9926435b2b3796ecef43a844c7b23632a71b955cd3546356a54b09129fbe
b9e20842febac8c7ed0059caf96e90b1020dae709b08016e74896539a538cfbd634049ae15ec77a970af8045ae928d01bc54bf0ae10fbc3cba4a796a64b7eacf8d15e9a18b97941ec9327450a3878f556a76689673d05289f94b712c334cd334a63cba8d66c836b8d5a8fbd3f9d2c4ffe9291d10c6c1d523ae1cb066e42ab4bb4e34fd35251ea578dc202b73a75d3acb04a420b007c00d5e6fe21e1c7279abecaff83e96e64f3d8f89b293c3eb316a1e679dc87f3d0c8cd8a973de6bd461210834a1240146db751ed4ff4e3fa30ff152985d094d5615c7215d41a12532185ca71c14c1bf0081d4f21129b76ae5090e13ea15e47be4c0fb0f634a141335766942f76ec502f65dcb1aaf13fb2dd23e65e94463f584c08d469271852c5588e548cd0167a251348ec2f33f383d9583dd451e82c0655e5f4522960ae4790873ec142f65f4992d960446c3a8b746ee426598236c87da44fbe63353554e42cda45df90fa331a199afb20b0f08279744711befe48759ee793da45357d32836a8531251bc09925d4cbf6d906ff4237d79fc67516df3967cc0ea2fbab056951847744cc34e3fb909238bf0e73de31aa914fcf238e68bc981dd06a6bd4603695c233b9657888dc915785ffd072358bb5b1d10b07ffe7bb44fe07dcf23aa3c4859e790b2d65fca09bf3f6d258e7f318ea8145a33e537ab9db1ce4fb9a595a7fee1948ce7fe7fe6b833bc47af19f139adae492a3a31aef57aef3c0d280305e3f62387e742bbf43e24b6e600d4f1f186d05553768b83ee8dfc27c217ca462e096a645991a109125d2be6b8b98a7cb982ea28215d4bae5a621d003e8a7958ba1a4df24000e214521574cad90fb4be1053af094c2c351f0b1d51b23ca1b1107f887d614c7d30d94324d107f465b54dd3f7636dd54ad0140e00b89d4e4c7baa4aba7d8de231d726ee83ed4808687e4e83e2daf5bbcef3923dc4985f8d03ee8ae0796436ec61c026ef234646e8954d5f92eeff4d55f5f09630b1aad9ec4a2510b53019123f47d57bbe3378e9750f2347ffda57cc0144cb7deaf3eab9b70f6a73fe515f9b1a8e0620953fda34981d8d393f483bc2dcd5e6f6660daf2770b8d477b375081972aa5b426650197c36178246e8053235d687d8a5544137621d0bd8065f3337f5977bc33290034fbb2853eccc6747c0d549629fc4e32841a7db89b499e00396fd6d97c69e95b92720068c5583f7ce157bc48e66db68b60af99b5646472fa58004823164f21062bfb2f6cb74a45fb7c602e519c25f6c139a3f6739884b48b8707be1c15eff89f2a9d63342933f52f1f16bd057ca0a93665e2233ee9bc1a3c235989a19c7f3457f66320f3f0eed930800a8a7f851a4be3911cf749cf89532bb25a7906857bc9fd31ee79959da9514e8bd830ea6a9336f54a049c394c9d39e94e2af304e0819ada3fd0e45b7f69867090a74881cc6ecbc5eb1b73b4ecad33c98a45efe732d7f843f2317776dcff7d06ec0253a231a49ce2aa009fa054b3c73206f29c6896e386f6df076df329a4747eddb4c5fdb510ade3db51dfad1f972646639555231610fab9b5bfdef8a93a5d39da1e01797d9758400a8632d3e9f5b67fdf19d63174a5b098d7cf10a705837ddfffad38b1dc553800c8a81cac10192bdba1c1f948612d4ac880a42e8e1b641ce22e091c92d6fa9519702e860acab4d59a0e4416cb27c1c2018312721f89cbc2552db81c4df28c3009731a2d3b05f7e6600694d25e8d194d98f41062529eacc9ca6290ae2608c65cd58605d340504154f8266afb4e5dd87c74c9ca8a241e34dc6798aae6e23e035de45f38a023432d701cb4fe6e64833a61ff295aa4c88ae5dc23be60a54ff096e213f091f519c7e7537e9710f6d0f9ed88d370415192d4d4b06dafc606e91f7492c5a27a77639b698a89778f5ca55fbe63ab8625d2b881b28caa029062b3937c71d4ac2ccb6c3f2471adba7249a72ea4fc5ad7cde8aaf757750aa33c8253bbc9eac30ff8a0cd286b77b05f5924f4e405556cfc7b2d5b78d78da7e9f6572345b05fcee3fe5cc9d8ba65565104e399fbc056288c71974b630931b6f2cb8cfc39dd4d22b11dc8d784fcc51a5575db339c14935c04a286e4864af363c034e66e8383da863cea84fbd7533850d220adc41601668673e93afd744571730ee46f78ef070ee157ed0974e1387100c0224c8362735a5447802b91111c5df1273f7ef37fcf1ef21b20a37351dd118e7d37662bcaba9313016a2692c21835df57c8fa606430e2d3d80d7b9d2f0ebdfe97a2cbe66aba4ed29686d5f6cd22d91b30bf1028142b844ec6fad9bf65b47c17eff177958b8bbf630623a3a76bbd2a5eaf0d147a4ccb86a65a776e3c5dbf8a7cfc40a4183642bfc9ca7cecc9729ce954652456da0492e931bedb284cf39f1df13d3e196d343f6a158ec1f6050435f34074537e6030540060aebe4580401a8910d0c8fc00a2e97524f2c1c4ce5a02dbc5c88e26b788d5155b04
214e97feb2259525ba7d479bce4b5a2a6a79a060f3366659efb383beac1bd7b5f9f404b149df80b824b09639b10892f4ca0d933fa38e9eb8dd6f3f10937ccd6f75bc3089f87d6386fa9e0b0ccd6812037e73411f1aa8731f2b5e6ff127901a19d5ab76d416115c4953175838e0a5ac9d623299d20c7556a97e8509eac89eae0a37a918233e3f8e1a9f249463752bada86bc263a541d83a779540682b6e9eb0f4fb3d6a8fbf3bc26ea6de0e68d49ff7e7ac0942100d2c39420229c48dc907614442e08832fd2660e3c2cab599673d70569193a9ff7f491f819ff2aceaa5141641592e352975e3aeb2bdfbad052caed82e96693cd7b6242c4d521cb8ca8cad841d928fec39d2b33a1eed820bbdde4111d6317cc76e8b5a0f8b7cddc2f8fb9d2e588733515ac54a886b3d88af4415d4a7484c44570ee300d14c4521e97386d4b9afbca1918f05bee41494100c53a3ef8170f559427a7c5c3d986c68228fe753a4fc87794a95576a7a94ccef747057ae030453d21fbc458a2040b9cb065f47c1fc9aa7d613d2430d3679f501e77d451de52e35c8bb35803066cd9892a463816847ce4cde1441b6d05571dec00b23bf82a01b1b8eff83c59e938b6906423f4aa7770562493951ce0cc4db0de8947edc9c9ee71938adaa0c185720f4ebdee01db6046b2f50f95e7b973911f9357e38aa081a39e40199f254d493bcc73847788cd3094f9dd3c7fb6132206cf16b53fa895677010a5b01892326185f74c7879da70f00edff7d6f668c7d974da5ea110262"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x90, 0xb, 0x0, 0x1, [{0x48, 0x1, 0x0, 0x1, @osf={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_DREG={0x8, 0x1, 0x1, 0x0, 0x4}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_FLAGS={0x8}, @NFTA_OSF_TTL={0x5}]}}}, {0x18, 0x1, 0x0, 0x1, @immediate={{0xe}, @val={0x4}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @xfrm={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @exthdr={{0xb}, @void}}]}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_USERDATA={0xa2, 0x6, 0x1, 0x0, "71cc936bc5418efe2fea2af081d04db233b3d99b76c69262169522e8e7a2c13f22e4f745da37fc1b45c625b5426017d35d1c8aa6d591325cebea9a32f645884e540bcb5e6f2e93f53a37b50ece516260ecd75afd15774f91592e1742399ad6a11b1783bab558dfe4d5289b752296b5f91c02ea9abae1928efeaf840ada292a61efda18659181a97cefafabfe374051d5b7177851861e84b6ad6bfcb87dd6"}, @NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x13c0, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0x4c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0xf6, 0x1, "f794c4f53277af42506667696d128866dac48c8c2189736939ca1b7545f75a23f587b56a7b8382b52ee0ce10f07c23c98e3386c5708ff33f8c23569a64bebc7145bd0ae1b7deee49e64ea8a1b562a1b3a92086e4e191edb04bcf5f53fd9eaa444894187a02b9dc9e4c68dc1b5168cb88eaebcfbc5399aa4e42d5ac0b8d27ed8f5d85206e787618a376d356f7c2a2ab40dd15ecc38e891dff9230b2e1ec76830b2a34d5b9a0d2b3f28e3d9a1707a5fec6080b7fa5df8395e17a07f886e8bcf86cbe8a7fbf31b23687b8770b81feb980b51e102fb8bc6d42ed96c6066fb630328f09fb99a6f76fd2b40ce9e7f65f055d3b7ce3"}, @NFTA_DATA_VALUE={0xa6, 0x1, "8eec8760e6fd42e9e7db0f0fbafae9927a4fd0a5482f149fabfbd93a7998067cc8c2fd5b3a075bce2aacad37539cfcfe66cd35e5fcb732a7bcd7f0b45e1e420f0c2380e091e892d8cd7e0ec8447f051b19caee155ece060d2066493944b5d746ce50e96ee4fb53f2b86188609ba83828c7023041191187df56c38e2c075da442f0001ac1d2b2d470275cb7632818db384d71bfd6fb0ff83a429039fa09ee3caa068d"}, @NFTA_DATA_VERDICT={0x60, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 
0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x18, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz1\x00'}]}, @NFTA_DATA_VALUE={0x3d, 0x1, "28b27372c622d26ceea1e4d9c6e88926fd8440b15065430a996c003a2f889d6bbecaecb4e77d1f7e9b335f1e3d94be9ef2876140196e4928ba"}, @NFTA_DATA_VALUE={0x1004, 0x1, "91adf7df9367d0210459c19c5dbd5cdf7a2c1c00aca75dc33514dbce70ac78b134fa6a89833dc90af072307e8fbe6b85cc0c18634ac2cc9db853423f60c1fda2e45c7bb1f4f727ac35ba57806d612db645eda9e90e1271f2e278753874357d670c2e0ab08912d223bf110a3df57b62a4ad2128f31b58675ca30c2494b8803df84c72f0d8f83723f8ae2e1edc825cee8d06e5db74f6190ce97a2bf57642731f2ff702531a3c14b04e2b95789e1486d572e4196ae4ae9df4fe83c0ccd683476cdfed19d951d5eba059700968c57078316c43fdb73c90715852854edb1867e5f11c5fdf9e56f4a4dfa2dd22a111ff0037931c109d4e82966285180484f0578afc1e3e5e243a98c8d2e654aab986dd7a24bd714a6a642281fe97277310f1cfedc7577820248bd261b347ab2b74e12cac285b63bb1ed15e26ccb59251592dc078a54694dcc7134ddd3c1c6e5d9b7154006cf87c1fd514b771bfeaaa91f45edc3b8cda637c4146e60d783f28c8752d6b1bf25458c08c28f2981ff6a4a0a2f3cfce49e1b37566a8f83671324ab96af29bc5a1a34285b7a6af3f0673e21d131a2f74154f16668084ad1ee00f19bb4406fd22c9e3be2409b0bf7f05b169ed682a7285ad312a474183bfe7eb66bd5abcdbcebd89fd18e723c88e7ff12ec2e9e9120addf73266260720fdaa8c304a5ce509f002c4786dbf3f14badaf359c9136b9ffb861594aa6614ec00ab6e17c4c69a3cfeb257a780191b6b631ee1e48c766ef1da34d15ee624bddc2a44536b86c957e9b7b776ee55fd61d5dc38a4bee0172cae10b44994192ea8d3b9db464a51c6b1776b553644391b3d072dbb2665d63bbb60780142fea673ff687c8892c4446b2d521c381b06077f7ceded44d0ae530396e5d9e3cc264fc24a0f66bba31068d41e31b13a5316eeccd8b16ac5d42a868cf8cbb4986885ffbe36ac9dd7ec3b2e399e807c5d2a5a3dc0508f971f11c3888ff4b83c7a89d611c8d493c392a2c5c2732a5ea5d5295a53533a5ddb27438f72e1ef745c0bb6d66f09c01c00b178a05de1c56c6ff96afd17429bfd3a0a93065b3fd2287c0f32bc7bc0da7c296b291fef41b18025d0f9f9ad154ac38af4aaf2d8919c1d846a9145c05a6af157047851edf53cde690ca0145c9d604e4d8297d8b961e1bfcea8d4d5cef354557abcda81e082282baf474865a4b77c2a6a1b605e4fd5b60902677375a35d56d5d000a17b0f8fc298db9cbfb34810d721e97b1fba37545cfc3fe48f93272b0b4f4ea18b7cd9c7e5c391023e30e26e93a83632713562c202dfaa5781e7f2b5df258ea7e1428721fe4dcb2191e4a5d4d2ed1d5dc196cd22977abc08388ed77e06379c0f5e398bfdbc60d32ecee1969df1c72be38847656567fa2cefb7dcd0b9fb33944c598e48ddd48a39fdaff85b26af6f5b06255abd70fcf5e08549816498c6481c1a1aadc2a046d04a46f3f0e0857199fb2f1d920c576b2405d3022341ef980aab755b674c946c6adb0e68aa2de422f87048323b3bd70747bb613e063b63be5df8d9b2e1359f6bd1c1922141950e080f306d15a3944b2124b2c4dbf3ccb9a37cd6d8332524ea12b4bb5a0ef48ebfc9baf4b1e42df27d819ccdadfba5d805ed01653de6a6838f3d14b4b6d2e5918900e887f25f6c930576f578db96585e8a414c847c7dc2fb133a6e34e19cba0330bd0de1b50db77e4382764e0d1f3857a43d4e76b0210501fa74b4b2d70f3f0e546cf47b6dbdb23d22e39643a0551ea3e1ad5782cef3f11b849618357f05a5a8a96a66f86877e01b41a88823a283aeb692d24d46474253935bf178520bdf8641b5275d682edea23cb658824ad41c04b9b418342c9ef4e00637b511f92f2d55767b4c7331ef04ce493b08c97692e4d7b4ed0023aae9e98f1d00447740265f5649d661f8688cdf0e2108c7e7e1f34e9d01708a327a5a8c05e546cc46f5abbd8908177c2e7180ca0bb6c108971f4da0cf4ef9b020be6d03d2232e5ceba96dc2574338533b83fc169f07654e0cbd888ca0d9e5df7aa4627440eebc31253dc1413b0fdbb5ab68e4dacc98b11d3cd5a8563f1859068e0d82311c4740dc5e130dc167492ec527c9956a595a5ef7b498426b376a19a3ba9606e
9977a22e5759982d1378c3789e48af40456b31e634170b767f75b88cff90ed9abff021a8e74aab48629838980f0aa08de0f762bd5ae808a99b70547edc46dc76869a0f9162cb119b5b3a2aa7b4418b4e55a456d31e100b00bfe0e72afe1c08b7d4cab1dc642c7258b40fe645d5b29d043a60019f524fc89a7c3ef0f1f7d138239a1ae68d8122ed4e42f22720a866197ec995c516ad219f7cffe78a02227a3cde816a0232ce4a587f7cdb371f22069f1a83eaa2a75184ce874feba01964c1536de4b3f7237109fdfc922aee1f410e8ba05098c0fe0e3e918b40ef4b3588c839b0affb7aa66d897e49de5fee113fa3ff2b0239ac9dda90c0eb3996d362f2b64b03ceee89873ac6b6a27fcc06a022bacf5727d9cf0185e502fda30c2cd840d6007b6006aed8c98220c716a271996b0af1b9129428491dc82c9ecddcdc0d7f6a1cc0ee00aebdf888590c9a70b713c68312b79a58b848f5acd063759945002fe130ca501dfdc8667610656b4a779d9a749376b998d54a6e9e44ad442d2d1b1a4a36c24d702f291482fdb10e879176d88acf125755806eb23e64ad99de33ba736245677f3d012636401474c467ac5135316a89b3152a8eda2768a5facbbb4949c0a5281c53feea1b63fab3ebdd51bfafed8f5440053065a1803ee7ccb9239136f6a74bfea4f5855d68fa7a2bc2671e2ca0d4ea7c1c1bf42862637b4ebdfae01d26743c4d351f6b48617cf65f6d2c172b3287a4f435337a43134f70b00e29fe5a68d21366139ce16551f926bea0b7779ecf23bd44eb2872a6241784c8ac42ea7a432e037d9691ee3a4ea4431350c93e8a18c0b25683e02a078caf1c734c1ae2cd36f1bc2f3c871f928dc9e3dc4911e348cf21862591f431df61ec9fdb13c7672e781f5366db41cc9509e6b7357cb53c15cad827b9a03d8a012d2c992163066dbda4f145ac66e6f339c3d9cfd7b485e7fc0eb09b0bd6002f364140c455bb82aa7d4df94f5464e18ebb4c801ca5d10190fbe6c4df982df34139d3fe66f2ecdf4990d811197f2682c1988ede71d9f3e9cbf7407570e1b3cb92a6fcd5ea61b5c48988697f56447e5eda3e80e1e04685dd1cf6bc410826ffec70f016e8bcd20f8680d87e906c68eb68a474410fe1ab9e1d7b0cd61932ac2701ab2006dd3a05f9e6404fd88fd3b8bf91960c147de5e4b6bd828e796e303d2a246c8efb29dde9e6c3cbe4d51c8fa0d00b2ae0be2f1ee879fc3dad8c8e2985fe4e0cf4a58b7cf0092152b9493a818cc47acdb3f03903425cad2272203131e78f245f84b198a494118a2496329bc628585e36026a29c709d886dcf0d06c9bae54afeca00a274f592cb4a5c92cc910b8f99e76a556a6bc0edd492d1f00b4b2e839bf402cfff9c439d2fc2bdb32a9025fc74707c437950cfe9453a7d45fea687f0f9534fffd3c68500dae5d3e3cb7e4ade2d9f1e2bb61aa421e0709caf2e3326b7efd2901a726dcac01cf26dd9bd45d6c15da07d4eacad49a23bf267e57c2d061a9510a34a328dc9acf2f5461a42207e791ac6f041a71090c6d1812e114b2fbefc4b3b9f86f9fbc93dfa6dab902a5bb755c857ccdfbe1894b05fdeefe7c8a10c4d45a5e3e303edf2d027846a67f9fe43d12dfb35efdcc4a2db780b011ed068f7cb53710e1cb8501af85471765a023aec8897d6e9e38f4ac055d40044d56e4e25a188265b2a1769d1324cb4403e28c8bd82564327a69fe03986b000d64fab2eea2ad2b6dd881e681cfd64477953e542b9e29f6444a03b73f40ff0f01f6b35a62b28d415834f0c0c66004f2015cc5a596cf65f2cef8c9bd142cd5f59a5beed7c3cabe121033238fc765d4b87457c44247975d0db86581c3dcbdbd92cb0da27b7c7b5f5713dbd351cdb1797d5ffe9f713206f46ae999132502c0749b16a4d71503cba334535be9f4e2bec0f6a07c606b076a3e9ee24aea4fe63270ecad86fa1adcab51dd8583b6b8c388d196d65d9ed8e30c2d88ce9f4e11da7d4a8d752cbfb24c1b3ebdffba3c27d23413f7c098a1c0bf3f1e4b43355fb169b85d17d580d3d050d8fce5fc69c816de92d92934d969e695c265336313d1c06f170f5278d0921c028a163268e805e6ee0cb9bdc956af7c3fb32d6aef5e119528ee7d08b6a7c6604bbac2798af5e2b84b40cb23a534827982cb10c343b25ec7caaee6e1f2d1c96143eb6f42a2d920191630e17bcec724ff80bd5924b776d9ef9ea4bef1d7850c773a7156ad2aa85f560dbfa9d97dfdb6b4db33c2672b4b70e04052052141afe8f7e0319c296ae6c58b85006ed1c9535ccbc2f6a595171ebd8446f0c5dab1936f92d6766767a09961b6ebe80197ca396b4776e28559a2604e9b42e747f8152e207f4a8c5d37cd71c81aaca9ac9eef295ec799ddb79db21a3b35244f4c26e8d54b9da073f2d843a2c51c45496f37c0d8368f45bd5e69b3f85b4151e480c827b1637aab7a1cb3719352b0ed719f5608e86086491559c98156b44004958d79dfb65d2ef8e4e
a4e5cbad32334be25947e56cb7538c5282117b3fa9ecd9890df60fc2974c39b4d7675f538fbdaef7a2ae5d55e4539695fa1cd0cc733a17140eed0b97a13a23de8b70d396d5baf74e9269705e9380554679af72e6f8e96fccebbd02158197768751a9c1c0c47405554167e17457fc756345b2224a3e9a85b06093db83b10867108c5c84aae84ea2ba93a2275e5a73a23741fcd325d4e74a98acca57cb4d7c2c649993585b21c4cbc5497f6a4af62763ef746d0d096cadb2134c343c64e62933d120af99c1f1de3f535ec9349305307ef12d5132df2ab5ec6260cbebaf3033aa78ae3ed7aa11eda338b5715b68a4676d8f2db52bfa53d3e37ff1472c8aabc9187c35983cd7e9684189d5e0271771c6e07302d73ac6c4ceb91c5fb1542372c4e25ab249adfdad92196b1d5b8cae2cf3728dea09a57b59812da41e2fb87e160cf5f9bb899c2f3b5406b3f0a458035b9c6dfa8d8152ede863a2c7dafce6d6634e2d7ddf290d9d6aadf521063f0529adad4882bc8dceae3d6e6373746f84778996b3698d724424dba1a99c6c4d052b7bbdddf95b03ca22a3d29991d70cf3efd101880c0aec82c885afe2e72f8ca5c21901b853d64d4b666027e51dd9647efac9f940268523d965c1bdb39f2fe269082016948d23919a720560ea9dff78ff2ec16c96f46b36e74d4ee93505df0462a0269e5bff13d443c05f3f6222b90854625d4d2c400b05d1aca51d6dac6de3dd180349a59258563bef2a2d5c04f76235e1f178050df73d048c8b1864ad986d35bd17cd9ac8f6f1ce6113cbdb25ed9871c359e3953dc4043ed18a309a2304adb6c0e3a7629192d0f4e02f85acdd51ec0a2af2eaeb11df582e84203446fb0fbf699d29efa1af8e5b69b78a4437031716029d75e7621fe91be05e6bec650622d2c468ca202cad865e3fb85ff3b6b78a77e5a6669e9c696a2f9a9fdd9b212cfb476b866f274802bc7dd27bb1f47bcda431a2275a8f890afc980e0fdb667d2685bbd9d5f63e5d86d8a86d18ff476d14320b651b10882620e9a83b83c1a71a7e4027d759a163a4d8f13e5cbc359881e2cf36d0b01c5303f5936b3065842b05aca223524a8ac13da1ad190e3d595730bae0a947c342371ac541f2172cce6a81bb"}, @NFTA_DATA_VALUE={0x74, 0x1, "6a335f291c2f41777bc478841c66f0c2464807cd9be04fb60391974901162bc672c3f3efff91a46db08c6a3037628afc24982acea8dd48710e82bd79037bf1d35deee91a1ff352da51a5ad64e2db620c4a54f93b581649a3b0545ea4c42e91e8744d288f963ae099908436da51db75bf"}, @NFTA_DATA_VALUE={0x7e, 0x1, "0c65a6a75ce27a4142d3982aeabc1c82037f63fb29e11eced59ca5282cc91a3c28a715810daa5a50d5a2114597cb464b7d3fd9f2960932636ca9a8ebf81dcd48b9a8e3da613836e0d8ee828224b3cadf555a39f28b3316a95b2a70afc47306e16b177fb06e75f9e9b1d5a6d942ded2918c6a49df10201d1c44ca"}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffc}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}]}, @NFTA_SET_ELEM_KEY_END={0xac, 0xa, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "6a19777d5ac9db2443054cc1591d4a8dfb20b1d8bd9aebac5f6458077ec8c49ca4ae12191d6f48493aab2f8e7dee65bed30ba57506e4f25af5f3f6e0552beda6aa0bb687ea12e221faa9fdd44045c89d6be225c99eedd0681d11262539516760206a7bbc9531ed9b3582d851acb82b03bdffb942f89366"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}]}, @NFTA_DATA_VERDICT={0x20, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}]}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x8000}, @NFTA_SET_ELEM_EXPRESSIONS={0x2464, 0xb, 0x0, 0x1, [{0x64, 0x1, 0x0, 0x1, @log={{0x8}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_LOG_PREFIX={0x6, 0x2, 0x1, 0x0, '$\x00'}, @NFTA_LOG_FLAGS={0x8, 0x6, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x9, 0x2, 0x1, 0x0, 'syz1\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0xff}, @NFTA_LOG_LEVEL={0x8, 0x5, 0x1, 0x0, 0x6}, @NFTA_LOG_SNAPLEN={0x8, 0x3, 0x1, 0x0, 0x9}, @NFTA_LOG_PREFIX={0x8, 0x2, 0x1, 0x0, '\'#)\x00'}, @NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x8}, 
@NFTA_LOG_GROUP={0x6, 0x1, 0x1, 0x0, 0x4}, @NFTA_LOG_GROUP={0x6}]}}}, {0x2344, 0x1, 0x0, 0x1, @cmp={{0x8}, @val={0x2338, 0x2, 0x0, 0x1, [@NFTA_CMP_DATA={0x1298, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0xd0, 0x1, "fac7b70c8b72c5f5d06f06928c3e8c614333da2c6b655e2a3f7e481de722867b006e2e4811bcef46db0ee1a8ec21e6b760e7efe693e484ea182851f8048d1a0727c6684f2631a7707c7b61084b2f0e3b007196e1fe40891bcd92a20c931708ef27d69d6ee25408cb466fdbaccb4831b409d15c3ed9b8ac85ea8af7b20a4cc8687cd71636e8bd9d808d50d7b2e292e217bd51cc88216f189f41a9cf415727de72b11658c5902376cc59f647d41760eb69f2cf3c6072f68a80776eb99c6bedd1fc27d8cc4d62bd028d49dd387a"}, @NFTA_DATA_VALUE={0x66, 0x1, "a1e13eb173831b61b073b580ff9ac37cbeb582278cfa44c91aa86fc4824bc7c38a823d163ee1843397df3c7d6f046e9aff097a010764be3d16a8fc03f43f68457a20e66136034dacd3241f45460d96108e92d314ca339486f6b3365ea0e044db9272"}, @NFTA_DATA_VERDICT={0x2c, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x1}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xffffffffffffffff}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}]}, @NFTA_DATA_VALUE={0x1004, 0x1, "e1393513f1a5151ff2cb8cd5f521de47bb384d174a9ff2010af89818df2949110e0d468ef011cfa2d9397a5000dee5d652ea181c85095d996a724bb8c00e4938b95495b29bd5daaa235e3ab44f5490225ae90cf9a1a2502d4496a183076faa4fef78ec55770b180102180d98aab885e1ef986bd34f33cdc0824bcad0fa1430d781b365b6c9886a93cb413e61b4096036dab559ee2d0834d4ba3744872754b927dcb8d37d56a9a72dd860ddfa78f6e154d52045d7460e3e9e599013631c52c8cfefdd95c064261eab6aab3d11e758e4eae10cf9e9c77d8fb5a2388bae1f9420475b8aa2b6d1b7ed71de67d309569fd30eb1d6af02edfc74843e82ca6febe4a09b89debbf286e3a2002f7c9b15c04b90936573c673a052e849fa1f37f2331f615d8fd9206f9ec59f603053095508250458f2b6fa6874a772b691c0cd5ed4eaf70e284aefa6378c302301b9997d60308ac49ac19fc4c1d6d54c41055d7a3d6b2122c4667c240190ca27cbb378f58cd2b61a2a9b4334862c15ea5e40f1c5442fa310d8a1a2b89bb7afa4d7b833876e7ba281af3495f799e145bb605362ad79534b09051c0d81cd9fb94b2d80c767fdf3aeb06eb0cf3368c09b776a39d2d384f86d500d718c33a84236166ba7f3d066aa9ba5086a36bdbca6aa804d4ceaef856b74469b54c279e961410b7357bbb23a1ad9e96203f967391c7fb0fb60a4e947372306a691122c71f26a0f8448fdcc764fa3deb232dbbfe6c64ce87b4f62fd4cc4993c678a920cb9ade8f9d88df24bbc67b4e853430f7f0f7e0a7bf3a2a167d351892bf4cf7f986218f9be4ae00ab849d89cd4d31ef2064d7003a032a5b4ef3d848f6cfecfd59169d7f6c718cdda529c1cb2dd5b117ec7c3f492d51f64cb581ed5e3c4287129334655cfee23d891f1a42a06f5fbf1be27c1f9501a172371f396b7b35bd545e630cc748b691a9a07b1010a75a5f31215af231ecdfde1ce04ced7a8429d285a05473f81c29e8f77d17d8b0ad95ce87945db667602b9623678eee596821d596d200045273a75c18bad9ef9e5a14efcf443ef811d36fb42b7dee207f341b5a135815010e1495710d85b1cdf298bf827ec68583baf91c4a097ab1a1e3f1f93bb47f23fe50571b693b961495530ae7a4960745a8948628a31633b12e0ddae1072f6f9e037fdefe9e2c7abf707a7603b01a4d26d95d79d3840154ff69dea7426b38f1078e9f071321dcebb753bcd18f5e0c44275e35221f0e74fbf0e72fd7a42b67b811c8d6355c22829b9d37e232dd67b22d99edbb80b61aef13665dc9b382e40ea97f6b4f21c52d3b1a0d124217c4e1d7509a807a2c0cc6e7715451c232c3a0c7036163fee4599b15ac390a18dc36d386b8e8c74546b20df813d16ad2f1ab2fe8b31e093161bda7195bde66988183151ef65bae18be29ac9243927f244eea66b0975108f3b69637c091edcd5b5a4af667169ea10ac58b490e3acdf154d799969b41f119a72c6fd6b946811c4bae427fdf204afe58f86be027da8e6b4ca7749cf0bd95dbbcb54d5e2ec16752cb9c710136c5e33fcb6adeb4986b1f32f2aad07cc13d1445fffe721803ee2c64540c826598e4ef73f35d8ec827fcb0739d40a7f8073d0fc23cc4cf97dd99b7c7527e6ce85c5fa919b97de4864c9918c25074e22a5fabb3cb6d4765641b9a9803e79700c4c32f1e567b7934ff788a0de56b48d7
a1df306737ffd6ad89949cab162cf00874c8a37fa0bd1c365fe7cb2e2668451ebdf629fd5e144438ff87073e240b32415a4bdb3f3dae33033fde389549e1de3ee763820322e6df4fd35d1d53b3167eae1b980b8992e5c394a3280d8f5851853cc096cb9a86bd34ae82c626038eaefaa6212ae9086879d2b2833e3824ea846d83e951ef34ad620054749af1ca2cb5619733e13fa5b7bf2c7968dc1cf1e696e6e684695bc6e14460ebec4a4a3911cbb23d5f4356b0df4f1c387b92c58010565d3846e4bce34fdb0eb6ca5e3c92138e84e53dff4cd5a4585577d5db8ec7b59233758c57fa6328f8b81c01010c19833864350c677675013530db76c91af476ab2a0249b5dabdad5d8eab01db3fddbac6eaa6e3f6069eac6a0ee512859573dafdc11b703cea7c45d6093eb8eb7503105945bcc0eb9007a8deda5f1802fd64f7e79e264003bb367e6f63604e49eb5ad751a6b016380e127d4ee3963754b17f25e5ea2f751ef97f88874f6eaa5f610798348868b3898f82efdd7f329696a780e6704a1a8ed438f239ca3779dbef0caebd6c1782a2dfad8d5d5b9bbce6b83d70ee6bafd847db3238996e45643a0ac9ea4d40e1dd5daaf7fcd1b6dfb9fa17084b8de95548d891346067fbaee2e1b213e1cbcea8cd17673fab3c3836eeeea4eca7eebaf321e5fb95189259a5b396a742e183420344bc423b2a3e83c76ca47c103f0c3e18e0f6398bbc750fa748df54e57f58733d9f9b2e955ea10aec56c30e8ba37b0b6ee2ae10f5c8cb6129a6b2bc4d5cc6595ab977dfe60425c3ba9f8b52044763854b600c7caf5b64f0444e7ab2aaf14c9ee852f96f1211a1f7a462e8c857c0b62039589f46af460243ec7d3ac40fd79c15fe6ee8288e9a2ca4bd9f46658d877fed43465eed76dd9734066eb9adc4cb9c18b1a4666a1376a07ccd6758047446808baafef624d2d0742cab6f0a041be0876d19bb604364d52e6cf2f88c379a223bd27db27419458eba06655270102a96fa889f7663b1999cea3aae321129e6fe763942821ef69bed0d3c9cab89d06f195d4042370315915cd7bec31e9fb39929886a3897ae18192c7eaf7fb3a6feb5220ace6b78245cfa5b0b814822db94e505d5a5bb84845801d529afc1a6b50f909f21ffe0ace1eae6aa2110e90f724173f2ef09cee70172330efcdddfcb90977b977fd7ba57d782e1b080a5cbbb0785a711ac76913208d42ccf7f3092b64e18960287a0c71d4e95d54ba01128ff02c037dac56d317e9bd294c46025e3768acb789de0b59a1431eee52fbc71832e71a702ccb32a4f01b8119081d613dd0bb1335f5ba8273bb136f447501f67c2d2c3cb7341d6619c8eda59b2fd5c7dcb9d47fd4f2256c43b313c039c356c29390bf927a1010734a32e5aacb3d35711b47a112dc8946a8dbbf561cc83e948a8f98363b7415d850d189a02355e3acb1d230d19599de67a6a479642a604fad34e5969dabf4a431b5007341958cd6246aa6429e4ff86a0c47e72c2ebb3f34e1d509ea04ba1a77f5a1511d69887037903c6d48fa6c532f744f24e8b394cac7192b43a8447b0c677c598b8b081634f66b037feab1ffd04ff94ea062c509e22cdca5b8765b4d9bc111df683652e446e90bcd9b7ebab402d0e242016b94e81d6ba982ab56ed89f02ec233c25a680e14a5a2ff0bffdd11ca84f5bab7d4870ca2342898a76c5d40904157bd18fdb07d64a49940bd4fda6c3930f57c4c074a768f3ef921a0d1202d574bf1f9a1a8a1eb2e133696297f2a9bdf33d29c8e909887e23644024890c8e95744910057ca21a449cf815e407f3878f97d8ba9ec878dd771a7c97ce05af34b37bc9b8e81cbbaf9834cef138a0d979f54634cbaad67af5b2f5a79cf4ec8553cbfdfcea170091c987a1008aaa0c2870babb04bfac4c14ec39d3db37d6d564334feebb618d89fca342691b111277e182fce2c1363110dd0d8bbe4ae8085ada4ff4f2daa5406e8a7462735c7d8c023c655578ee10fe93654014fe8fb195d93a3d1f330c07f307282b8180c39867f655f6442682b3dd840d397291caffebabb783782e44b5ecdd761c9d9992731c30b4de7a1b8bb642fece60ed11fa6acd0ad7cdde9e687c99436c574c3d6405758097dd8b636564b9be4c78f3e29da4fc8109a9ac8aa65778da911b190009782d303a4fddf11c705bb6a3a66194bc5fd8c6cbcefac16e3a472d92d2906466fa636b5dac73e5c29de41958cf3bcd90e83a3499f12b065d796ec93127b9865cc9417efda32c50587a960c725e4d70e9a9742d7fdfc2083001d8365e5a3a649ee85ad3febdc1266925b913cca2a84d3c697a7c4290cd4ffe214d752a25caee04f55c323fc9c6a5c75918134a4e77b080bd0c2c4dee1cd4c2f322c53bfc539df25696158fff601687fe568e4064c7f32387c2ec3e06cccbfc1b6c9e156088d95202fe96cbb48877b32ecc193e144f5b63fdfa71f58e9a8b298249a82d12cf8ddec422b8a617eadcc298934281ee2
df6c58ecad46926d06786a19a503f2db35e028e54fa522641ebd641d4b14b6b4e5b8983eee3043343e37cfdf026ce89e0ab38b834c66d9f520a29330acbd50bc67ff68107856e232132f56ca87e280ba07f3703803cde57f4583016526841af4b1dd671425dfd931765705df74826b6fb4586ddb177b50f779ac2deda4103c564b6fc6cfe0a08a2299aef413274c354e7774e750a53f637d7020ba8f7c87aed2b728af9d1d185fbc7c38fde59a9dcd1ff884b3ac934ad089911946f4f47b165c03ed5d92bfa48cfee2b880588432192cde31435ffa46f3ae99e023d3c39aba11ec3c15a5dc8d6b0b7632d038d569da0879bd8fff036e74af7789c1825bef832015f3f64ad6ce5c55af6e4c59fbb7e137ef19f3d8510daad5d632b64259799b1cf8e4dfe3a1c7dda5486a1c32f4addd8f4e0de95c4a3c17e4edfa9bfecce97ab9bdb15f5c31c944c2dca76c286772311392a856c119f52f99686a7af1ae4e72bee3e14238499d68ad29abfb8f0f3836aaf4485f3b17c9dcb8423e5dc14aaee885ba7955f948d36afd0e5d519fc6d13a429dba16fc85ba68c87fc38c36a8b33ff22885f78a9a455114e348b6a7228e02463783bdc78df74a25c104e25dadc081a42d931bf5839147f00d9236c959da1ebda6f2730ad2ab819f64751312954f5d9d00a5c9eb242453c7ae7bf4fa2566f769f23c91d8c8d929d567ebc3516d7a505510848d52c4e24f41fca7bceed51ab5bd003a5c58a2d6c55ebd2f53b43e0204606a2895219b3179779498e88024af8128b11813c8505c1065217a57ddc5d5f1d3468ded917ba326bf96851e35809e167297aaf48be69347f28f0530a05085551180eb940407ac633612d28320f5a129a79e0cc691de6b02876b8ec847a3c5ed1c628d874e6b5bfd1bf0111d602e324df33846fca26021c98aa0a28efe5d178ea24206ca8e5367ec7cf8d58d1319296d47e698befb5880c49698e5aedd2c3d82b4239f98e2a66729c17dc7e356df192f35fd9a6e3371d488c19d3a7fdada96316a2c6cc153ab5cce1b4d59893a5ecb9dd411e7f66e2a87a2c4e7680b7181d339ae91a2daaff12895a50e64d15eee3a9949d9c330219d946be05a429bb16aec76b8aa5dcd1cfd2fd4a8173e87d7e7572fe292ad8890688fe01257b76bac1adec16e65c7bdf055bb678697f1b32c469fdd6b90d58df0ffef539d495807792504bf2b668ef4ca5a43e8f660db5f2c5731bd442a73c3e611b2813968c915dc9990bd228c7df06759bb104af187c0bcf481553b294ce9d28a823fea168b10ea49a8ec0ba498a32e6ba5e4a5320ccaf5708e2a204a98c7e483820155e772c73c628be7512828d00799c02eed429374bd1562e777fd83daafab83206bffda892e05c79eeaaf5394b707dbd1971fefda46ac14d9cf254268460e14254ddeda8f32ed9ebdc55a097290b37c9c850c9e102eff841a70324e6967a73c4e57a9f0c17e1f70dfc6dcf5b9b4b2e4857973910cf58b89a91daa1d689fc87fce391e2a501c2744f42f885b99acb5bc568e8e4cb191131247"}, @NFTA_DATA_VALUE={0xcd, 0x1, "7e91ce715b4320a797f4eef01e2751d34e314f450547dd93dac1bf66c3e696a020a2a626b4909d2e5c92276dc0955f178b2915d15149398c1ab96f3ba04640c4692032c549658592e5549bc916de6475b142d57035c190885f1a83b5992389c1b39774aa62ad1b7bc69faa0cdf9dce90e7b913286a93bcde8b33865eefbaae729b47997b0ada1782027e4a0585d1460aa99f107d1d3bb76052bad631acf6f5d8d066713aaded375d3ab61bd5efd1bbaefb901d236beea85b5be5a35d1804cb107713356aba7b3c6b82"}, @NFTA_DATA_VALUE={0x13, 0x1, "cfd0fdcb1e8d46a1974ea6dd614b29"}, @NFTA_DATA_VERDICT={0x34, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz2\x00'}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffd}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x7f}, @NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN={0x9, 0x2, 'syz0\x00'}]}, @NFTA_DATA_VERDICT={0x14, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffe}, @NFTA_VERDICT_CHAIN_ID={0x8, 0x3, 0x1, 0x0, 0x8000}]}]}, @NFTA_CMP_OP={0x8}, @NFTA_CMP_DATA={0x1084, 0x3, 0x0, 0x1, [@NFTA_DATA_VALUE={0x79, 0x1, "c9b9e5ad8c77f45e3f25d4f551fdeef79e81cd3cfd64184d881e37f8909bfc2cefb8247876ac247051e5705707de54c84e8b2e78aee695478b8012aa637ecda9aaf8500a8e054d380aca1dd0c5af81122121dc3e472d4c9baebf044e56136a83e639213cc7ce98d83ea897ee575cc84a94b59b8d08"}, @NFTA_DATA_VALUE={0x1004, 0x1, 
"d4da619abd76987d05a35ec6885b01b49cd4553eaf517790eaa8503443ac0dcc2e1f9d8c3f6555c8de78919f53fa36c1eb7add93ef3cfda6a8bb4c6935395630f60230065ad2c5128e3918fb658c1819f3b27f2721e479f621d9534da58a730f8a0e7e3fd0dd8edd07c7bdb77fbdf327bfec4ecdfdb6ce90f5c1d8a33a07bdc46bd5f2a96cb7a21d9fd6423a73283c8985d4a76394aee7e436cab3bce98ec7816ea106ba45862dcd7fce117660013316d77f5a1a8efb611367991d44195e410498a9da2c85189490755542c15707f144f841049e7050d0ace0a7bd61e05f778ab22fdd3aab27f2732577d448060be95f4a8f1b7c8bb6ab8287c159b4ed7307be7652cc0d0b2005ea5c1e90e47ad113f8925e9b15323121fa2b201e24653267e1526b653cf7c379e223a686db67c90afc86dd9239d4d8e15fa3f5fb941d9effbb9331848632ffb91ae15f1c9e230c6b0226ebdb016ccb0c7200c59753b2816108eaae617bb83f07e5e4ca34ae3349e88b14551ff6530547495943333ff0becdbe59cfbb1ecbc9d55bd796592749176909ed965e2d4f8f6b95898562ff2e4ab2a8fb99d13efeb0418e4ae5b277d5f0a5454c31d55e3c847dc68bb2335d373957b7bd692fb93e2e2ea4cd99f45e25e81062445c8f185c12ab7547271f4d53c3252623dd07796729367e38589c1ca005346cdcb01ed5d92cf52f71899021d12543e2f0e68c476c43da24f933e6c905d3e45fc554a7955c964e7d9943b3d86ffbe3be5c112c45bf2b812b89306e562f305038a601ec775e199fb72a50f3228a26ce59539a48151f0dfa1d26728ed3d2c536ed13effacf1be89a472266bf6afc837b40dc88859977044912165f8c1faeb435fddc3fed11cc30cff18d34fe785b401616aab2e15f5ba8ce25c1eae2863a277fb9383662aa5bc86f7d1723008f619f485f0b1e9cb8ff14a1facfa181c88b4763a55b7bc1f22277a763679d33580248002f54ca71a63bca61848e486ab21a2a6116a40b372c700cbf2466a53c2f65e2ac7c373605fbd0ff2f4a4b89629375904dabfacb409b284f0e6c49ec1586aa6787a20ee26b645b06f974fd8200f023043b1b614a001b42ce5741be1677ff2847d77d089f812dadf027c4d2dc040be0613fdcff60af9c968fd862d347914eb5a7bb0368843c979e508cc52ffe98fa2e738619e96f17d3cbcb9081de80277b47da84ac40e0fd05ef20d2f765b34a393b107c0e9f789db8ca99ff74323484f15216958768427ce5d44613f39813106541663d9cf43cebb5373d08711bbc7e002539e312ebbdbd95a914e350dc33c2239bcda26e4ae71f09840c87a2c066f43449ec4f4d54bcf67932295d8b2d9c4c66a2ead3158574c0c4ac8213bac8c2885b70e8e8bae55f2a02f5456896bac0cd8646cfeb99129663548a3c94b8d0ae8890dd27394844cb093e4c69dd5851fb64ea7750acd3d4e340a7e8d9cd852631df372a7abcf88491043f4a717224dbb2749ce738a91cd997b8dd3f58656eebf22958ce068763b906da17c6cc0d10ba6f633154e91497b4865915c39380fd4fbf20691f715576ff76b68317a5ef89b5c54a2d4f2b394900e875599b3d9d6c3b198cba351ecfd81fcbbe5de34cc489a5a08ab322a34e3b8855b9dafe74fd3d438b369a8c10c091373c520ac3325a2ee4aa8f51c65b27f602e68cd87d445c05b69c9dbfa7170d7ff7d081960dce1e9d014c85a3e87eea2c8d41cec3aa011beac02c14a8a7b8017ec766d31c6d7ba8d4a5ab48c282f0a221f0480a85ca65c27fbabb4f2f268f2ef7b7af4e63692c36635a517614d841cd94c305be2cb6f3cc45f6855466337780019d458a7c724a5d5ccd91e3fa628b906004bcbd744c0995d9ca18e1da7f93b5e052df9389dc6fa3b1f84e869ee91bb1822ff9a20366d43856fdcff60e99db8b227920ebb41cbd087e7f4fb0436e5a8cb7c1ff92f9526fe7ada02cc4737426a841e88215234ac301704840246d1b74face39cf6854ecbbae97d136ef3f5adda6b6bfc5ec572b8a392d3cdc85640e1229a9004e101ed553b439af0b26d72b4a0767d6ffeb7bf9503cc2b901d5d9d02325acf795884c2f0d4351d1853ed7b97d06536e0926a209bdf4b3fed191560ee455f5bbb21b0b5975edc49124ac6efb65038ddeaeddb49aadb69fb27cdeebfcc9fe79dfd7b8d26ff0cef7c94f85bf01288e2151ba7ff055137efae242ebf750078c05755e0a36fe8504261e55ec54e24a4576bf2ebeb3bab4a05099979f28a96a2dc776b6341ec57128736081df64c62264f54d3938ee6c442c8a5261ba6d5e46af5f06bac7d38dea422faf5f86dabd2a531148c63edc6ac593c36014c9acab1d4282d9a15f557caf31ed637198f373a3d8b4d514e62e20facc6907346843296f745b3d19dab0ebac8889ed5c46a5d66489281fb7d228c4c5dd69fefa3875036493084f4b5987b997bc77dc46bb0bf79ffc4478b6f26b98f497e8f25b38607b08b98fa8
27e4251707fb383f346b1d67ce768c310dbdde3db4a4d16e217572c9225b06736b875af8f7b36d3c503c86020948f549606dc6a18e8dd0434ff26d3eeb541c631f7d29f270fe3f26f7eb92af5185536b497b78b1a75aaad267186f45846c36bfcab489187c335a0be8c6c5b8d762e576c5199e4afcd583c5d795b7a593b627fb0c65efe1398a220fa7c7ede4de474da590363a1eeb4610c09359e9664e8a57ce063b8be6ff9e5ddb42ee484060fe95626c2a3cc41f612a3bea10820f242507ad77174d025951c939fe43011fb591ee7b28cdf76ef42b07d89da0b400621c4b28b1b02497ab737d96c36dac9caba67c8a656fa34aba0804b701b3d77ff26d3513e00adf27478c681ee19b933d21be7854957f0faf51353714c5997a30907876db422b9232f2ee53dc94f8a0bdd18a32055fe680aa35b96d6e11290caa27b6408ad2ad9dd4e4df5f558127aceb443ec3268ac287e6e624fd10b84df977054740068754d4ed9e6becce7ed513d448124ae59c7e4f37cf1ce6d7ead4c38eb65141bc2f98888e19f39ed24a9696df34b408e27afdba0360346c65fb9e18f471e3bdabb0c735c022126566ade83c34acc54a868dbaf53f864f37f57a6b98b36d28754b978af0aded93c2ab1a4ed6a4731a7340e66584922fe3a42af46ba147ce1498a943fc5be1b123da665f785ede53746412678e1ae92b6ed392606f19c9df2e7f1eeabaeb537ca76b7650f4277a58a64249fa0f2adaf3b1949d8724c502b27dfb922f466bbb37da22935b6a6fdeb9b8982dad6aef58105bed4e150afb0551dada7d5fe79f1767ac95b32a1c21aba66c2ff67c3d46926bccd2bb8ee1a2b4a4d4e7eb0aafd98ee11f63327cd6b4c8ed91794ec46592899170d90d43f19e36304e86b56cf77646eb3c58361370d6ccd7ab9aa22af639171ce936eec5b8d582345018275fa3ef2b029f5a63508d4e697ac54e06262879dc1bcce4cc7e1c2dc1a0e384143b17d37273e15a20bc40236f15539f3a6435d5ed8270589941d1f3695b9b157fa5d646bd1c52f34cf1e250808f1bba28df7d1a4728c2e36ec048deac69ab436b4df2741988894a1680a9af7ac87491ec49277f0bb5a7e35586d2044c9f5f54ba614811832bbceb92e088447a94a0f8246026b45fe63fb3c0340f6ad2fa05e14addd2029bcb8df2b0bbc1a6a7acce653fed02325a1f6c649667019c1419db800f005b648d1fd3ebb357221fc4731d287d6b1a5f2ac3193429cbf6064540646cbb7957988477e54c6e05e4ae70c06254e59ceca2abff59aa6a60201afe8317366c4fc8ca1df77b3eb81572a31ac1f776a2e9e68ad650ffb013b14a0553b1bbccd55bfb151848079b63a292f175d3ebc821938a4ea28e59b945f57fcfd9c15710cfd2edc3eee12143a230d8b0271e637d907c60d9a7f189b7eeeb09d7c1634dc76af0153d3d5a8d289156fc7d8b72b432d0254231fb69c07ec581195c1da23dc0287639d46c9e7753fda35d767866139f3b1fe5da4167d0e6dffd82d0128f2c4a3b21bb086682c1a6133992eebe475f1748d8f8bb2d0a9f205220d58b85d52cec4ffad855ae028be2b48b8e7bf4bb5c257a1e2b35583fb37072d81c25d1416eaf58c3d6d69704e6ad8f943bd5c0fed8921a2669afa8e06534617800eaff4a9be0b3cc3c040958950cd48781ccc06bb20eb3aa955b35e9c937cd9ab82648eea8e38152c3756cf5b4312ce96612f2e52a09903bed85a7882214df5441c822c459a0341809d13adee790cdb5cc4ca6855bfa3bc50c04e0d81810085b055ae477378bdd346b8cdb464cec087c3a735187b253ed9369f2580a3e5aa7ae84bd1d6ef6c96ee7f8b4a4ddc4e13b05a48972714277255e4c2757dcf61094ce97c7d2137c92451166a3f7935b1cd8c14e02602e49552ab93b0330286630665c183f586a8acb643a4d1243b654b5a1fbcf4f53fa6718357679e40b56e5e169878fda1b540f1687c230af61142a4551fb1c96b9723a930f7f7836bdd6b8a6e19ffe0dd0c6df226fec28047a36d215ef231fa796ceafdacdb6fe962475bc2c74d62495e69cebc20f0f0f237a08d47c8eefb97c9a6517264e9bb2ed398529ddfd23f0ab535652e683e7d472fc8dd49019c302b18985183e3d21bab5d1938952f0edd0cacfbebbc84a048f29fc5c17d0267bff1f1cdceeda75c4bf42dddbf75f9d53818e250c5388afe9a9823a60a5a2fc39a0f9910836175cf8369b43fe9609d6bbd47e4aca82114a12223b08792099416d7fbee535910d0fa6eb0d95804b4908e2981065adf5b4463ff7795dbe50f88763d9a3b10c9c5af0a76c497b03351a36ba58edf291ba908f6f4be68282a9e285c6fe07412ef44b366c9d721732fb0943d3cc2f25e6bc76b4cb87dfb97f7dcbdf738ebd5430dd27fb68a8560f1a562435c4afa47dc3baffd5a60cad99af5871b70252185042c266e13b4567622d794755aa148bbe09146fea257149f21f32b97139f8b0a92be86d0dec9
77d5262c136091aae02fb2465cc0b7925c30e66a05eda72ba913682cc6e73df5fc513c593e46adea2470dc4bf702c38392b37910d2349036574734f16e3c49d6e500a5c7484f63e02c2beeaa96a2f66ca70792b8e64bfa2087b03a5c4bdd756170213208a28f546c864361dc06e417b32c754e16c54c64256ea971701e347b03150839c19f8a60b21f611a8d60a42e95b11263fe4711fdb43ca5f3408e0eedb96a3c378933eb11c8dd01bbec1395d9d0f9405724fd4ca91d52c5d6f3ce62ff0ceb80720afe428f7b2e627dffb4262487aeb0c64cff7200514039fe5dbf0764f8a17037e91c2f847dd6e6d60594a2911eaa67c8c73ca427240cefef99c24ee635c2f20abc614587046c976894e585935f112b32477b15a28c145d0abff32820a104939ca921434fe07a02e2244db7d8203dacde74222d4235b3cad25ea0e0bde2573b1616f0b4639afb8cbc63699cb980f6bdddfed8d48f9ef8b47d629e68bea1c9b03027d7f528f6dd839bf6e0ec1667f145f2af0b0a4f466e44124f7363300bd10a24b410aa748d07bf6eaa0b0ccf0326bb0293e15314605b2fe47bf320378849e0fbd49bb2cf4598d95e14653f210169e1de312d4ab31da7306accbd0e048aaa51450bc57b763a23ad95223822627692c7758193747019a63f3c43aece9ce92f5ef427e06b178f3069459756a5fc1350c99ac611689f64f1e9c61ac90b6c68402d97896b69d0ea166b5bb6f51ef764215ae9d76a687e62d9bf954b049"}]}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}, @NFTA_CMP_OP={0x8, 0x2, 0x1, 0x0, 0x5}]}}}, {0x3c, 0x1, 0x0, 0x1, @payload={{0xc}, @val={0x2c, 0x2, 0x0, 0x1, [@NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0xf6f0}, @NFTA_PAYLOAD_CSUM_TYPE={0x8}, @NFTA_PAYLOAD_SREG={0x8}, @NFTA_PAYLOAD_OFFSET={0x8, 0x3, 0x1, 0x0, 0x4}, @NFTA_PAYLOAD_CSUM_OFFSET={0x8, 0x7, 0x1, 0x0, 0x4e2eaf5}]}}}, {0x18, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @void}}, {0x48, 0x1, 0x0, 0x1, @dup={{0x8}, @val={0x3c, 0x2, 0x0, 0x1, [@NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xb}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x8}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x9}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0x2}, @NFTA_DUP_SREG_DEV={0x8, 0x2, 0x1, 0x0, 0xf}]}}}, {0xc, 0x1, 0x0, 0x1, @fib={{0x8}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}]}]}, {0x698, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPRESSIONS={0x16c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @masq={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @payload={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @reject={{0xb}, @void}}, {0x28, 0x1, 0x0, 0x1, @dynset={{0xb}, @val={0x18, 0x2, 0x0, 0x1, [@NFTA_DYNSET_SET_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_DYNSET_SREG_KEY={0x8, 0x4, 0x1, 0x0, 0x4}]}}}, {0x58, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x44, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xf1}, @NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0xa5e0}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8, 0x2, 0x1, 0x0, 0x1}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x4}]}}}, {0xc, 0x1, 0x0, 0x1, @cmp={{0x8}, @void}}, {0xc, 0x1, 0x0, 0x1, @dup_ipv6={{0x8}, @void}}, {0x70, 0x1, 0x0, 0x1, @flow_offload={{0x11}, @val={0x58, 0x2, 0x0, 0x1, [@NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz2\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz0\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}, @NFTA_FLOW_TABLE_NAME={0x9, 0x1, 'syz1\x00'}]}}}, {0x30, 0x1, 0x0, 0x1, @connlimit={{0xe}, @val={0x1c, 0x2, 0x0, 0x1, [@NFTA_CONNLIMIT_FLAGS={0x8}, @NFTA_CONNLIMIT_COUNT={0x8, 0x1, 0x1, 0x0, 0x5}, @NFTA_CONNLIMIT_FLAGS={0x8}]}}}]}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 
0x800}, @NFTA_SET_ELEM_USERDATA={0xc8, 0x6, 0x1, 0x0, "eaad1451f975c8c6abeba6a24f95c9a0389ec019b5e07fe22430dcdbe301b4e1a690a1c6994edbe259703f6889a1de2ae21391e565cdf277bb0342de8932b07fab736d688233d6f97aa55be53aff7f597f2601b4937ccd8e0578a0d4cff2c67aeda945584183b8b4f71af47931dfcf1b6d5190414ec4f86e4e580341a9b3e1cc2e03c7ac2d69431e2aac73c270f1fef5ad16aa3b3e442cfd12c074be32ab5aee2a2f6ffd3efcefef88df6b90609f0e80ba6e67ae4a825882b0bcf865640ae5f639f501d8"}, @NFTA_SET_ELEM_EXPIRATION={0xc, 0x5, 0x1, 0x0, 0x1c8}, @NFTA_SET_ELEM_OBJREF={0x9, 0x9, 'syz2\x00'}, @NFTA_SET_ELEM_DATA={0x1a8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x51, 0x1, "c43dfab6100cb89fa6bb94a0dd58ad8d9888ed150298b532d2a09a378787522ae89f1d0ad91e1c9334b47b8e62d7f2ef7fb41910122400ef8764a8203e98b437f30fd73298e4aa89e831b6c4c2"}, @NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8, 0x1, 0x0, 0x1, 0xfffffffffffffffb}]}, @NFTA_DATA_VALUE={0x71, 0x1, "539bd3f5f14deff0806ffbb9d244786b2beb7a23baaa49600a22749529c4dbb967d99dba26d4a1f64ba60e087b9edd9cd639a650875acd29d362a71c5ab2427ece6572c310f10bcd7d00fbfad7b44c7e531abdef84db16a993b0a1d3cb7270cdbae44289fd0ef8dc48cbbda471"}, @NFTA_DATA_VALUE={0xcf, 0x1, "c42bf7c61fdd643b60e96fd4ff48f5f5072d0a340690f5e64a7ea93714794789cf79238f95e86aaeff9c1654d51d6f17571e10b854f88d5438e4bdbe40ffbb6b3e7661a77f2ee4954909ca59f4aa0d20b823abb4569b3324c456fadd0282999c9e62b053a49083c184b11522ba7bcb7f83995748f27c89c00ee367aebcdd7516204d8fd87584bb2611a8b153e54170141e2bfc2d4f2f2d72f264cb2e8e6feadfe180be65971ffbc1d324411fa9ea2eb065d0cefc42d5d2e91f80cf662b4e3079d658e6ee57d6cb88323958"}]}, @NFTA_SET_ELEM_DATA={0x1f8, 0x2, 0x0, 0x1, [@NFTA_DATA_VALUE={0x7b, 0x1, "ae45008c9a1bcb1fac9054d6a2d7709a6f78b6fe1a93d5b0039a6963f24499dd69aa1b782658aacd3a3dc2b0ffc38b58165ac21c12b016add41db86d705007a29618327bdee906df17116259bd4b83ca896334a381eae1c75b14b7d341cfbcfe2b88d9ac10d8c5c61c65c56dc84bf798258a67f9597bf0"}, @NFTA_DATA_VALUE={0xea, 0x1, "5ee57b1ed31d28755aba93b88ddb278b5afed9f3b6d4804f5f410c6790d59464891d50fb3ffa837577dc0e15240731eda83b4a56c75eb06fb473f9d82c262c052e7e31195c046a553eec6d2e8616b6e12fffefc965fa7ab7f6172e8cf4b8ff9c51994c9a8923ab17319e5d043fc28ad1be83a67ef228b73507593a266211ba026fd7888d8f4a10a00a0a4281333e46ad6821650c7f8d5382086e67f3a182a1c69258df96284afb7685bec12b275414ac524ebb51ffd3df54dfbf836af4649751f50e32671ed661faaf864f5325a5123b99236a4ab273e5bd5cb86c51db2154648bc883c6843b"}, @NFTA_DATA_VALUE={0x89, 0x1, "4a9893eaeb636da0ff534c1cde93a909a562d366a594c256c9fc4340138a93b71ed58e34053c49b77ebd8dfd75a3b088bfbbe55c92ebf4bf3e347eb863d989963709f823fa7187fefbdc10d74a954c256c4317e942a1c99e8fab1b6eb0b42f6516a287d3855c62c66266cec349e8f98ee7a9c16b38ca33148ebcf457ba78b1ca2b57695340"}]}, @NFTA_SET_ELEM_EXPRESSIONS={0x9c, 0xb, 0x0, 0x1, [{0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @hash={{0x9}, @void}}, {0x10, 0x1, 0x0, 0x1, @bitwise={{0xc}, @void}}, {0x10, 0x1, 0x0, 0x1, @dynset={{0xb}, @void}}, {0x58, 0x1, 0x0, 0x1, @limit={{0xa}, @val={0x48, 0x2, 0x0, 0x1, [@NFTA_LIMIT_RATE={0xc}, @NFTA_LIMIT_BURST={0x8, 0x3, 0x1, 0x0, 0x1ff}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x11}, @NFTA_LIMIT_FLAGS={0x8, 0x5, 0x1, 0x0, 0x1}, @NFTA_LIMIT_TYPE={0x8}, @NFTA_LIMIT_RATE={0xc, 0x1, 0x1, 0x0, 0x7}, @NFTA_LIMIT_TYPE={0x8, 0x4, 0x1, 0x0, 0x1}]}}}]}]}, {0x1158, 0x0, 0x0, 0x1, [@NFTA_SET_ELEM_EXPR={0x4}, @NFTA_SET_ELEM_KEY={0x1150, 0x1, 0x0, 0x1, [@NFTA_DATA_VERDICT={0xc, 0x2, 0x0, 0x1, [@NFTA_VERDICT_CODE={0x8}]}, @NFTA_DATA_VALUE={0xf8, 0x1, 
"56c05fb1708c45de3ca8d262e9f99b760354bbc1eb197931df5c7457f3e1b569992b349d4e0a4ac8e0f89c10d8a7ece99d11b1b1eada0b63e1dbc653058805975e9c0ed9fc8489e6b9007a35e48bdb297edda1876a0e351c6a11bee9e90e0b5398c7e0419fc0bcf27091dd5b07a2b1b0a9e0c18ee4b8c6c20e0e91d4c53c4643daebbdfb7773b6ce12a90eefb4098d3a50f37a016d56416a73ec61b5d1799c4f993f044dffd5be91a0aa4cdec51464bdba7d742c341bc9e29495a614cd29b4aaa0bb53530efb4fba5d32596905aaf839e6fdce45a9e6b43755091e523d9dc2166ff070067e47775684b5bb604679f8da3a86db69"}, @NFTA_DATA_VALUE={0x43, 0x1, "1acceb9bcd5a644a15c24b62a857a52466c9f379e0bd7ba613845682c8df02a6f285d6cfb7f72e6fa596e3407e04fa6323f892dcd1b5a795b17f36e2f5362b"}, @NFTA_DATA_VALUE={0x1004, 0x1, "ca7c471a5714be40b1f8a4318f4f49c81921f2542daf8bb88f2202b7f9f2718850cd2e6660424f6bc7075ed0fc1d67c76f2c225d174a97e4b663f88fd937177afc45f6062dd1fca20963d7ac217fe1abf6f70951d49eff473b2b7470705682cdd622498162ab2fc17935a600ef71c5d156c74a092fa56928ff73bdb76c19fee13769235afb054dee8d1da2fd33eb5c90a68f6f0c9a03af8ec7aff1e1f8a1000e7e8561536ea5e459b0cfa4e01d36961fd38ec7364528bf4ed508d85625aed194184a27f76d610d15490648e48b4b8ee24d875448560f96e851247fde002222559b0b8c11b48a25b66bf0d5563dc4e5d04074936c4b47a76c532a335fa07be8599665e92ba12dc2ba79fef8655514dec88b764a2ee6ad765533b0a3c149765e3e1fd980a3c4baf569f537f565a1f38de78ca588f0517abb814b247bb5c4ded2d0609a60c0ac22955fc4af000968802401180df7a7f108dd6083e4292ab2aa85780a9306e34e5928e6003ba6aa54fc31ac4bb4c4d1ec71e04b2b8af390e9fe7d196a1ea78ca9f0615b99905118be0ecaaf8b172bb23eb4cc10fc9baf76c27031a06a914d6b840db97ea071bde195012e3b5e71ff62c1c4e64fcc9d3159b297d3e5b9139215361ae750ee7f54b61b39138f4e95bf542c882aefaadfad5db1b400398208dba856515e5046837de9e3aa1a5b3c99d9d2ba9a3e5d0b740f2793db87fb3f30e599eb92bffef008e693a51a73c2941d415315a587a0e1b918c551f0e1382dd68b0d7a4955cefb57166cb4c3571cb9c6ed05f94782d46c9b2a7b3b71aa8516fb4b1fd491f8c07cae477448a1af8878f54ce0f68e17cb8b12489c6d8c45ef7e5f284ebbaf488e582715b97afc739e99fce73393d911b70903f39f9dc68482797a1a24143bdca7528a5bf0ad50c6ec458c7cbed740d417bf91202b50d689c6f5821e58833b56d573d348d0f6a31b289774e3210a8454f5a91 VM DIAGNOSIS: Warning: Permanently added '10.128.0.41' (ED25519) to the list of known hosts. 
lock-classes: 8169 [max: 8192] direct dependencies: 32414 [max: 131072] indirect dependencies: 179336 all direct dependencies: 2759360 dependency chains: 58604 [max: 65536] dependency chain hlocks used: 240980 [max: 327680] dependency chain hlocks lost: 0 in-hardirq chains: 130 in-softirq chains: 1966 in-process chains: 56507 stack-trace entries: 328496 [max: 1048576] number of stack traces: 14667 number of stack hash chains: 9688 combined max dependencies:hardirq-safe locks: 65 hardirq-unsafe locks: 7533 softirq-safe locks: 286 softirq-unsafe locks: 7220 irq-safe locks: 297 irq-unsafe locks: 7533 hardirq-read-safe locks: 4 hardirq-read-unsafe locks: 177 softirq-read-safe locks: 17 softirq-read-unsafe locks: 165 irq-read-safe locks: 17 irq-read-unsafe locks: 177 uncategorized locks: 344 unused locks: 1 max locking depth: 20 max bfs queue depth: 1371 max lock class index: 8191 debug_locks: 0 zapped classes: 12368 zapped lock chains: 257558 large chain blocks: 1 all lock classes: FD: 1 BD: 284 -.-.: (console_sem).lock FD: 191 BD: 5 +.+.: console_lock ->pool_lock#2 ->&obj_hash[i].lock ->&____s->seqcount ->&c->lock ->kbd_event_lock ->(console_sem).lock ->console_owner_lock ->fs_reclaim ->&x->wait#9 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#11 ->&fb_info->lock ->vt_event_lock ->&base->lock ->subsys mutex#6 ->&helper->lock ->&helper->damage_lock ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 1 ....: console_srcu FD: 33 BD: 1 +.+.: fill_pool_map-wait-type-override ->&____s->seqcount ->&c->lock ->pool_lock#2 ->pool_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 2 BD: 2957 -.-.: &obj_hash[i].lock ->pool_lock FD: 1 BD: 2957 -.-.: pool_lock FD: 719 BD: 15 +.+.: cgroup_mutex ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&obj_hash[i].lock ->cgroup_file_kn_lock ->css_set_lock ->blkcg_pol_mutex ->percpu_counters_lock ->shrinker_mutex ->&base->lock ->devcgroup_mutex ->cpu_hotplug_lock ->fs_reclaim ->&x->wait#2 ->&rq->__lock ->cgroup_mutex.wait_lock ->cgroup_rstat_lock ->&n->list_lock ->cpuset_mutex ->&dom->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->batched_entropy_u32.lock ->cgroup_idr_lock ->task_group_lock ->(wq_completion)cpuset_migrate_mm ->&wq->mutex ->&____s->seqcount#2 ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->remove_cache_srcu FD: 66 BD: 1 +.+.: fixmap_lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 FD: 255 BD: 98 ++++: cpu_hotplug_lock ->jump_label_mutex ->cpuhp_state_mutex ->wq_pool_mutex ->freezer_mutex ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->&ACCESS_PRIVATE(rtpcp, lock) ->smpboot_threads_lock ->&obj_hash[i].lock ->&pool->lock ->&rq->__lock ->&x->wait#5 ->mem_hotplug_lock ->mem_hotplug_lock.waiters.lock ->mem_hotplug_lock.rss.gp_wait.lock ->cpu_hotplug_lock.rss.gp_wait.lock ->rcu_node_0 ->&swhash->hlist_mutex ->pmus_lock ->pcp_batch_high_lock ->&xa->xa_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->kthread_create_lock ->&p->pi_lock ->&x->wait ->wq_pool_attach_mutex ->pcpu_alloc_mutex ->sparse_irq_lock ->cpu_hotplug_lock.waiters.lock ->&x->wait#6 ->cpuhp_state-up ->stop_cpus_mutex ->&wq->mutex ->hrtimer_bases.lock ->flush_lock 
->xps_map_mutex ->css_set_lock ->cpuset_mutex ->cgroup_threadgroup_rwsem ->cgroup_threadgroup_rwsem.waiters.lock ->cgroup_threadgroup_rwsem.rss.gp_wait.lock ->&list->lock#12 ->(work_completion)(flush) ->wq_pool_mutex.wait_lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 30 BD: 103 +.+.: jump_label_mutex ->patch_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 113 BD: 10 +.+.: console_mutex ->&port_lock_key ->syslog_lock ->(console_sem).lock ->&rq->__lock ->&root->kernfs_rwsem ->kernfs_notify_lock FD: 1 BD: 167 ..-.: input_pool.lock FD: 1 BD: 2931 ..-.: base_crng.lock FD: 1 BD: 104 ....: patch_lock FD: 1 BD: 1 ....: rcu_read_lock FD: 1 BD: 1 ....: crng_init_wait.lock FD: 1 BD: 1 ....: early_pfn_lock FD: 1 BD: 12 ....: devtree_lock FD: 1 BD: 1 ....: rcu_read_lock_sched FD: 10 BD: 103 ++++: resource_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount FD: 1 BD: 1 ....: restart_handler_list.lock FD: 723 BD: 2 +.+.: system_transition_mutex ->(pm_chain_head).rwsem ->device_hotplug_lock ->&rq->__lock ->system_transition_mutex.wait_lock ->&cfs_rq->removed.lock ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&obj_hash[i].lock ->(console_sem).lock ->&c->lock ->rcu_node_0 ->remove_cache_srcu ->(pm_chain_head).rwsem.wait_lock ->&p->pi_lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&mm->mmap_lock ->console_owner_lock ->console_owner ->&rcu_state.expedited_wq FD: 3 BD: 560 ..-.: pcpu_lock ->stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 1 ....: debug_hook_lock FD: 2 BD: 1 ....: zonelist_update_seq ->zonelist_update_seq.seqcount FD: 1 BD: 2 ....: zonelist_update_seq.seqcount FD: 191 BD: 99 +.+.: cpuhp_state_mutex ->cpuhp_state-down ->cpuhp_state-up ->resource_lock ->pool_lock#2 ->(console_sem).lock ->clockevents_lock ->&irq_desc_lock_class ->&p->pi_lock ->&x->wait#6 ->&rq->__lock ->fs_reclaim ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->crypto_alg_sem ->scomp_lock FD: 2 BD: 2871 -.-.: &zone->lock ->&____s->seqcount FD: 1 BD: 2916 .-.-: &____s->seqcount FD: 27 BD: 106 +.+.: &pcp->lock ->&zone->lock FD: 1 BD: 2983 -.-.: pool_lock#2 FD: 75 BD: 171 +.+.: pcpu_alloc_mutex ->pcpu_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&obj_hash[i].lock ->&rq->__lock ->&cfs_rq->removed.lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->pcpu_alloc_mutex.wait_lock ->stock_lock ->key ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&base->lock ->purge_vmap_area_lock FD: 6 BD: 2905 -.-.: &n->list_lock ->&c->lock FD: 5 BD: 2926 -.-.: &c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 133 BD: 66 +.+.: slab_mutex ->pool_lock#2 ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->fs_reclaim ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rq->__lock ->lock ->&root->kernfs_rwsem ->&k->list_lock ->&obj_hash[i].lock FD: 81 BD: 3 +.+.: trace_types_lock ->fs_reclaim ->pool_lock#2 ->pin_fs_lock ->&sb->s_type->i_mutex_key#5 FD: 1 BD: 2 ....: panic_notifier_list.lock FD: 1 BD: 1 ....: die_chain.lock FD: 68 BD: 4 +.+.: trace_event_sem ->trace_event_ida.xa_lock ->&rq->__lock ->fs_reclaim ->batched_entropy_u8.lock ->kfence_freelist_lock ->pool_lock#2 ->eventfs_mutex ->&c->lock ->&____s->seqcount FD: 3 BD: 574 ..-.: batched_entropy_u32.lock ->crngs.lock FD: 2 BD: 2930 ..-.: crngs.lock ->base_crng.lock FD: 17 BD: 405 +.+.: sysctl_lock ->&obj_hash[i].lock 
->pool_lock#2 ->krc.lock FD: 25 BD: 2810 -.-.: &rq->__lock ->&per_cpu_ptr(group->pcpu, cpu)->seq ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->&rt_b->rt_runtime_lock ->&cp->lock ->&rt_rq->rt_runtime_lock ->pool_lock#2 ->cpu_asid_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->cid_lock FD: 1 BD: 2811 ....: &cfs_b->lock FD: 26 BD: 1 ....: init_task.pi_lock ->&rq->__lock FD: 1 BD: 1 ....: init_task.vtime_seqcount FD: 78 BD: 102 +.+.: wq_pool_mutex ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&wq->mutex ->&obj_hash[i].lock ->fs_reclaim ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->wq_pool_attach_mutex ->&pool->lock ->&xa->xa_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->wq_pool_mutex.wait_lock ->rcu_node_0 ->quarantine_lock ->&base->lock ->&cfs_rq->removed.lock FD: 36 BD: 115 +.+.: &wq->mutex ->&pool->lock ->&x->wait#10 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->rcu_node_0 FD: 30 BD: 621 -.-.: &pool->lock ->&obj_hash[i].lock ->&p->pi_lock ->pool_lock#2 ->(worker)->lock ->wq_mayday_lock ->&base->lock ->&x->wait#10 FD: 66 BD: 57 +.+.: shrinker_mutex ->pool_lock#2 ->fs_reclaim ->&c->lock ->&____s->seqcount FD: 1 BD: 644 -.-.: rcu_node_0 FD: 13 BD: 55 -.-.: rcu_state.barrier_lock ->rcu_node_0 ->&obj_hash[i].lock FD: 31 BD: 3 ....: &rnp->exp_poll_lock FD: 9 BD: 5 ....: trace_event_ida.xa_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 FD: 1 BD: 1 ....: trigger_cmd_mutex FD: 10 BD: 241 +.+.: free_vmap_area_lock ->&obj_hash[i].lock ->pool_lock#2 ->init_mm.page_table_lock ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 1 BD: 242 +.+.: vmap_area_lock FD: 267 BD: 1 ....: acpi_probe_mutex ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&zone->lock ->&____s->seqcount ->init_mm.page_table_lock ->resource_lock ->&c->lock ->cpu_hotplug_lock ->(console_sem).lock ->irq_domain_mutex ->pcpu_alloc_mutex ->&domain->mutex ->&desc->request_mutex ->&irq_desc_lock_class ->cpu_pm_notifier.lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->iort_msi_chip_lock ->its_lock ->efi_mem_reserve_persistent_lock ->lpi_range_lock ->syscore_ops_lock ->clocksource_mutex FD: 5 BD: 242 +.+.: init_mm.page_table_lock ->&obj_hash[i].lock FD: 66 BD: 5 +.+.: irq_domain_mutex ->pool_lock#2 ->fs_reclaim FD: 149 BD: 7 +.+.: &domain->mutex ->sparse_irq_lock ->pool_lock#2 ->&irq_desc_lock_class ->fs_reclaim ->&obj_hash[i].lock ->&its->dev_alloc_lock FD: 146 BD: 105 +.+.: sparse_irq_lock ->&____s->seqcount ->pool_lock#2 ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&c->lock ->(cpu_running).wait.lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&x->wait#6 ->&p->pi_lock ->&irq_desc_lock_class ->fs_reclaim ->lock ->&root->kernfs_rwsem ->proc_subdir_lock ->&ent->pde_unload_lock ->proc_inum_ida.xa_lock ->sysfs_symlink_target_lock ->kernfs_idr_lock FD: 7 BD: 115 -.-.: &irq_desc_lock_class ->irq_controller_lock ->mask_lock ->&its->lock ->irq_resend_lock ->tmp_mask_lock FD: 20 BD: 14 +.+.: &desc->request_mutex ->&irq_desc_lock_class ->proc_subdir_lock ->&ent->pde_unload_lock ->proc_inum_ida.xa_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 ....: cpu_pm_notifier.lock FD: 16 BD: 177 +.+.: purge_vmap_area_lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 1 BD: 3 +.+.: iort_msi_chip_lock FD: 2 BD: 2 ....: its_lock ->&its->lock FD: 1 BD: 2 ....: 
efi_mem_reserve_persistent_lock FD: 4 BD: 9 +.+.: lpi_range_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +.+.: syscore_ops_lock FD: 1 BD: 120 ....: &its->lock FD: 1 BD: 100 +.+.: cpuhp_state-down FD: 182 BD: 100 +.+.: cpuhp_state-up ->smpboot_threads_lock ->sparse_irq_lock ->&swhash->hlist_mutex ->pmus_lock ->&x->wait#4 ->&obj_hash[i].lock ->hrtimer_bases.lock ->wq_pool_mutex ->rcu_node_0 ->resource_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&rq->__lock ->fs_reclaim ->lock ->&root->kernfs_rwsem ->&x->wait#9 ->&k->list_lock ->bus_type_sem ->&k->k_lock ->&pcp->lock ->swap_slots_cache_mutex FD: 4 BD: 1 -.-.: timekeeper_lock ->tk_core.seq.seqcount FD: 3 BD: 2852 ----: tk_core.seq.seqcount ->&obj_hash[i].lock FD: 1 BD: 116 ....: irq_controller_lock FD: 7 BD: 100 ....: clockevents_lock ->tk_core.seq.seqcount ->tick_broadcast_lock ->jiffies_seq.seqcount FD: 3 BD: 101 -...: tick_broadcast_lock ->jiffies_lock FD: 1 BD: 103 -.-.: jiffies_seq.seqcount FD: 256 BD: 2 +.+.: clocksource_mutex ->cpu_hotplug_lock ->(console_sem).lock FD: 13 BD: 2858 -.-.: &base->lock ->&obj_hash[i].lock FD: 3 BD: 5 ....: batched_entropy_u64.lock ->crngs.lock FD: 152 BD: 101 +.+.: pmus_lock ->pcpu_alloc_mutex ->pool_lock#2 ->&obj_hash[i].lock ->&cpuctx_mutex ->fs_reclaim ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->&rq->__lock ->&x->wait#9 ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->subsys mutex#29 FD: 1 BD: 101 +.+.: &swhash->hlist_mutex FD: 1 BD: 102 +.+.: &cpuctx_mutex FD: 1 BD: 3 ....: tty_ldiscs_lock FD: 2 BD: 6 ....: kbd_event_lock ->led_lock FD: 1 BD: 7 ..-.: led_lock FD: 1 BD: 280 ..-.: console_owner_lock FD: 40 BD: 3 +.+.: init_task.alloc_lock ->init_fs.lock FD: 69 BD: 2 +.+.: acpi_ioremap_lock ->pool_lock#2 ->fs_reclaim ->&____s->seqcount ->&c->lock ->free_vmap_area_lock ->vmap_area_lock FD: 1 BD: 15 ....: semaphore->lock FD: 1 BD: 14 +.+.: *(&acpi_gbl_reference_count_lock) FD: 14 BD: 2836 -.-.: hrtimer_bases.lock ->tk_core.seq.seqcount ->&obj_hash[i].lock FD: 1 BD: 537 ..-.: percpu_counters_lock FD: 32 BD: 2 +.+.: tomoyo_policy_lock ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->&c->lock ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu FD: 870 BD: 4 ++++: pernet_ops_rwsem ->stack_depot_init_mutex ->crngs.lock ->net_rwsem ->proc_inum_ida.xa_lock ->pool_lock#2 ->proc_subdir_lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->sysctl_lock ->pcpu_alloc_mutex ->net_generic_ids.xa_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&dir->lock ->&obj_hash[i].lock ->k-sk_lock-AF_NETLINK ->k-slock-AF_NETLINK ->nl_table_lock ->nl_table_wait.lock ->rtnl_mutex ->uevent_sock_mutex ->&net->rules_mod_lock ->slab_mutex ->&zone->lock ->batched_entropy_u32.lock ->percpu_counters_lock ->k-slock-AF_INET/1 ->cache_list_lock ->rcu_node_0 ->&rq->__lock ->tk_core.seq.seqcount ->&k->list_lock ->lock ->&root->kernfs_rwsem ->running_helpers_waitq.lock ->&sn->pipefs_sb_lock ->krc.lock ->pool_lock ->&s->s_inode_list_lock ->nf_hook_mutex ->cpu_hotplug_lock ->hwsim_netgroup_ida.xa_lock ->nf_ct_ecache_mutex ->nf_log_mutex ->ipvs->est_mutex ->&base->lock ->__ip_vs_app_mutex ->&hashinfo->lock#2 ->&net->ipv6.ip6addrlbl_table.lock ->(console_sem).lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->k-clock-AF_INET6 ->wq_pool_mutex ->pcpu_lock ->&list->lock#4 ->&dir->lock#2 ->ptype_lock ->k-clock-AF_TIPC ->k-sk_lock-AF_TIPC ->k-slock-AF_TIPC ->&this->receive_lock ->once_lock 
->nf_ct_proto_mutex ->k-sk_lock-AF_RXRPC ->k-slock-AF_RXRPC ->&rxnet->conn_lock ->&call->waitq ->&rx->call_lock ->&rxnet->call_lock ->&n->list_lock ->rdma_nets.xa_lock ->devices_rwsem ->rtnl_mutex.wait_lock ->&p->pi_lock ->&sem->wait_lock ->&____s->seqcount#2 ->&net->nsid_lock ->ebt_mutex ->nf_nat_proto_mutex ->&xt[i].mutex ->&nft_net->commit_mutex ->netns_bpf_mutex ->&x->wait#2 ->(&net->fs_probe_timer) ->&net->cells_lock ->(&net->cells_timer) ->bit_wait_table + i ->(&net->fs_timer) ->(wq_completion)kafsd ->&wq->mutex ->k-clock-AF_RXRPC ->&local->services_lock ->(wq_completion)krxrpcd ->rlock-AF_RXRPC ->&x->wait ->&cfs_rq->removed.lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&ent->pde_unload_lock ->ovs_mutex ->(work_completion)(&(&ovs_net->masks_rebalance)->work) ->(work_completion)(&ovs_net->dp_notify_work) ->&srv->idr_lock ->rcu_state.exp_mutex.wait_lock ->(work_completion)(&tn->work) ->&rnp->exp_lock ->rcu_state.exp_mutex ->&tn->nametbl_lock ->&rnp->exp_wq[2] ->&rnp->exp_wq[3] ->(work_completion)(&ht->run_work) ->&ht->mutex ->(work_completion)(&(&c->work)->work) ->(wq_completion)krdsd ->(work_completion)(&rtn->rds_tcp_accept_w) ->rds_tcp_conn_lock ->loop_conns_lock ->(wq_completion)l2tp ->rcu_state.barrier_mutex ->(&rxnet->peer_keepalive_timer) ->(work_completion)(&rxnet->peer_keepalive_work) ->(&rxnet->service_conn_reap_timer) ->&x->wait#10 ->dev_base_lock ->lweventlist_lock ->napi_hash_lock ->netdev_unregistering_wq.lock ->rcu_state.barrier_mutex.wait_lock ->&fn->fou_lock ->remove_cache_srcu ->ipvs->sync_mutex ->hwsim_radio_lock ->pin_fs_lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key#3 ->&sb->s_type->i_lock_key#7 ->mount_lock ->(inetaddr_chain).rwsem ->inet6addr_chain.lock ->(work_completion)(&local->restart_work) ->&list->lock#16 ->&rdev->wiphy.mtx ->(work_completion)(&rfkill->uevent_work) ->(work_completion)(&rfkill->sync_work) ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->&k->k_lock ->sysfs_symlink_target_lock ->subsys mutex#40 ->&x->wait#9 ->dpm_list_mtx ->&dev->power.lock ->deferred_probe_mutex ->device_links_lock ->&rfkill->lock ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->triggers_list_lock ->leds_list_lock ->(work_completion)(&rdev->wiphy_work) ->(work_completion)(&rdev->conn_work) ->(work_completion)(&rdev->event_work) ->(work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->(work_completion)(&(&rdev->background_cac_done_wk)->work) ->(work_completion)(&rdev->destroy_work) ->(work_completion)(&rdev->propagate_radar_detect_wk) ->(work_completion)(&rdev->propagate_cac_done_wk) ->(work_completion)(&rdev->mgmt_registrations_update_wk) ->(work_completion)(&rdev->background_cac_abort_wk) ->subsys mutex#53 ->gdp_mutex ->(&local->sta_cleanup) ->rdma_nets_rwsem ->k-clock-AF_NETLINK ->&nlk->wait ->quarantine_lock ->&wg->device_update_lock ->&bat_priv->forw_bcast_list_lock ->&bat_priv->forw_bat_list_lock ->&bat_priv->gw.list_lock ->(work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) ->&bat_priv->bat_v.ogm_buff_mutex ->&bat_priv->tvlv.container_list_lock ->&bat_priv->tvlv.handler_list_lock ->(work_completion)(&(&bat_priv->nc.work)->work) ->key#17 ->key#18 ->(work_completion)(&(&bat_priv->dat.work)->work) ->&hash->list_locks[i] ->(work_completion)(&(&bat_priv->bla.work)->work) ->key#20 ->(work_completion)(&(&bat_priv->mcast.work)->work) ->(work_completion)(&(&bat_priv->tt.work)->work) ->key#16 ->key#21 ->&bat_priv->tt.req_list_lock ->&bat_priv->tt.changes_list_lock ->&bat_priv->tt.roam_list_lock ->(work_completion)(&(&bat_priv->orig_work)->work) ->key#19 ->wq_mayday_lock 
->&hn->hn_lock ->&pnettable->lock ->&pnetids_ndev->lock ->k-sk_lock-AF_INET6/1 ->&net->sctp.addr_wq_lock ->&rcu_state.expedited_wq ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->&sn->gssp_lock ->&cd->hash_lock ->(&net->can.stattimer) ->vmap_area_lock ->purge_vmap_area_lock ->stock_lock ->xfrm_state_gc_work ->&net->xfrm.xfrm_state_lock ->&sb->s_type->i_lock_key#23 ->rename_lock.seqcount ->(work_completion)(&(&net->ipv6.addr_chk_work)->work) ->ip6_fl_lock ->(&net->ipv6.ip6_fib_timer) ->__ip_vs_mutex ->(&ipvs->dest_trash_timer) ->(work_completion)(&(&ipvs->expire_nodest_conn_work)->work) ->(work_completion)(&(&ipvs->defense_work)->work) ->(work_completion)(&(&ipvs->est_reload_work)->work) ->nfnl_subsys_ipset ->recent_lock ->hashlimit_mutex ->trans_gc_work ->nf_conntrack_mutex ->(work_completion)(&(&cnet->ecache.dwork)->work) ->tcp_metrics_lock ->k-clock-AF_INET ->(work_completion)(&net->xfrm.policy_hash_work) ->&net->xfrm.xfrm_policy_lock ->(work_completion)(&net->xfrm.state_hash_work) ->&xa->xa_lock#4 ->genl_sk_destructing_waitq.lock ->&rnp->exp_wq[0] ->uevent_sock_mutex.wait_lock ->&rnp->exp_wq[1] ->&lock->wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->pcpu_alloc_mutex.wait_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&meta->lock ->key ->key#25 ->(wq_completion)tipc_rcv ->(wq_completion)tipc_send ->(wq_completion)tipc_crypto ->(wq_completion)phy340 ->(wq_completion)phy339 ->(wq_completion)bond0#169 ->(wq_completion)tipc_rcv#2 ->(wq_completion)tipc_send#2 ->(wq_completion)tipc_crypto#2 ->(wq_completion)phy342 ->(wq_completion)phy341 ->(wq_completion)bond0#170 ->(wq_completion)tipc_rcv#3 ->(wq_completion)tipc_send#3 ->(wq_completion)tipc_crypto#3 ->(wq_completion)phy338 ->(wq_completion)phy337 FD: 26 BD: 56 +.+.: stack_depot_init_mutex ->&rq->__lock FD: 35 BD: 74 ++++: net_rwsem ->&list->lock#2 ->&rq->__lock ->pool_lock#2 ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->net_rwsem.wait_lock ->&rcu_state.expedited_wq ->quarantine_lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 10 BD: 120 ..-.: proc_inum_ida.xa_lock ->pool_lock#2 ->&c->lock ->&n->list_lock ->&obj_hash[i].lock FD: 711 BD: 51 +.+.: rtnl_mutex ->&c->lock ->&zone->lock ->&____s->seqcount ->pool_lock#2 ->fs_reclaim ->pcpu_alloc_mutex ->&xa->xa_lock#4 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#20 ->&dir->lock#2 ->dev_hotplug_mutex ->dev_base_lock ->input_pool.lock ->nl_table_lock ->nl_table_wait.lock ->net_rwsem ->batched_entropy_u32.lock ->&tbl->lock ->sysctl_lock ->krc.lock ->&rq->__lock ->stack_depot_init_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->wq_pool_mutex ->crngs.lock ->lweventlist_lock ->rtnl_mutex.wait_lock ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&cfs_rq->removed.lock ->&k->k_lock ->param_lock ->(console_sem).lock ->&rdev->wiphy.mtx ->&base->lock ->subsys mutex#55 ->&sdata->sec_mtx ->&local->iflist_mtx#2 ->lock#7 ->failover_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&tn->lock ->&idev->mc_lock ->&ndev->lock ->&pnettable->lock ->smc_ib_devices.mutex ->&(&net->nexthop.notifier_chain)->rwsem ->reg_requests_lock ->reg_pending_beacons_lock ->devnet_rename_sem ->&x->wait#2 ->&nft_net->commit_mutex ->&ent->pde_unload_lock 
->&n->list_lock ->target_list_lock ->rlock-AF_NETLINK ->(inetaddr_validator_chain).rwsem ->(inetaddr_chain).rwsem ->_xmit_LOOPBACK ->netpoll_srcu ->quarantine_lock ->remove_cache_srcu ->&in_dev->mc_tomb_lock ->&im->lock ->fib_info_lock ->cbs_list_lock ->(inet6addr_validator_chain).rwsem ->&net->ipv6.addrconf_hash_lock ->&ifa->lock ->&tb->tb6_lock ->&dev_addr_list_lock_key ->napi_hash_lock ->lapb_list_lock ->&sem->wait_lock ->x25_neigh_list_lock ->console_owner_lock ->console_owner ->_xmit_ETHER ->_xmit_SLIP ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->&cma->lock ->cma_mutex ->rcu_node_0 ->&rfkill->lock ->&dev->tx_global_lock ->&rnp->exp_wq[2] ->&sch->q.lock ->class ->(&tbl->proxy_timer) ->_xmit_VOID ->_xmit_X25 ->&lapbeth->up_lock ->&lapb->lock ->&dir->lock ->&rnp->exp_wq[3] ->&ul->lock#2 ->&n->lock ->&wpan_dev->association_lock ->dev_addr_sem ->_xmit_IEEE802154 ->&nr_netdev_addr_lock_key ->listen_lock ->&r->consumer_lock ->pool_lock ->&mm->mmap_lock ->pcpu_lock ->key ->percpu_counters_lock ->(switchdev_blocking_notif_chain).rwsem ->&br->hash_lock ->nf_hook_mutex ->j1939_netdev_lock ->hrtimer_bases.lock ->&bat_priv->tvlv.handler_list_lock ->&bat_priv->tvlv.container_list_lock ->&bat_priv->softif_vlan_list_lock ->key#16 ->&bat_priv->tt.changes_list_lock ->kernfs_idr_lock ->noop_qdisc.q.lock ->tk_core.seq.seqcount ->&wq->mutex ->init_lock ->&rnp->exp_wq[0] ->deferred_lock ->&br->lock ->&pn->hash_lock ->&rnp->exp_wq[1] ->&hard_iface->bat_iv.ogm_buff_mutex ->ptype_lock ->_xmit_NONE ->lock#9 ->&hsr->list_lock ->&meta->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->mount_lock ->&xa->xa_lock#18 ->&dev_addr_list_lock_key#3/1 ->req_lock ->&x->wait#11 ->subsys mutex#75 ->&rcu_state.expedited_wq ->bpf_devs_lock ->(work_completion)(&(&devlink_port->type_warn_dw)->work) ->&devlink_port->type_lock ->&vn->sock_lock ->&wg->device_update_lock ->_xmit_SIT ->&bridge_netdev_addr_lock_key/1 ->_xmit_TUNNEL ->_xmit_IPGRE ->_xmit_TUNNEL6 ->&dev_addr_list_lock_key/1 ->&dev_addr_list_lock_key#2/1 ->_xmit_ETHER/1 ->&nn->netlink_tap_lock ->&batadv_netdev_addr_lock_key/1 ->&vlan_netdev_addr_lock_key/1 ->&macvlan_netdev_addr_lock_key/1 ->&ipvlan->addrs_lock ->&macsec_netdev_addr_lock_key/1 ->key#20 ->&bat_priv->tt.commit_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->k-slock-AF_INET/1 ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&____s->seqcount#2 ->&ul->lock ->&lock->wait_lock ->&tun->lock ->dev->qdisc_tx_busylock ?: &qdisc_tx_busylock ->__ip_vs_mutex ->flowtable_lock ->&idev->mc_query_lock ->(work_completion)(&(&idev->mc_report_work)->work) ->&hwstats->hwsdev_list_lock ->&net->xdp.lock ->mirred_list_lock ->&idev->mc_report_lock ->&sb->s_type->i_lock_key#23 ->&dentry->d_lock ->rename_lock.seqcount ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&pnn->pndevs.lock ->&pnn->routes.lock ->dev_pm_qos_sysfs_mtx ->deferred_probe_mutex ->device_links_lock ->&net->xfrm.xfrm_state_lock ->&net->xfrm.xfrm_policy_lock ->&sb->s_type->i_lock_key#7 ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->(work_completion)(&wdev->disconnect_wk) ->(work_completion)(&wdev->pmsr_free_wk) ->&rdev->dev_wait ->&fq->lock ->&app->lock ->(&app->join_timer) ->(&app->periodic_timer) ->&list->lock#10 ->(&app->join_timer)#2 ->&app->lock#2 ->&list->lock#11 ->(work_completion)(&(&priv->scan_result)->work) ->(work_completion)(&(&priv->connect)->work) ->(&hsr->prune_timer) ->(&hsr->announce_timer) ->key#19 
->&bat_priv->forw_bcast_list_lock ->&bat_priv->forw_bat_list_lock ->(work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->(&pmctx->ip6_mc_router_timer) ->(&pmctx->ip4_mc_router_timer) ->(work_completion)(&ht->run_work) ->&ht->mutex ->&br->multicast_lock ->(work_completion)(&(&br->gc_work)->work) ->dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 ->&table->hash[i].lock ->k-clock-AF_INET6 ->k-clock-AF_INET ->&r->consumer_lock#2 ->&wg->socket_update_lock ->(work_completion)(&(&bond->mii_work)->work) ->(work_completion)(&(&bond->arp_work)->work) ->(work_completion)(&(&bond->alb_work)->work) ->(work_completion)(&(&bond->ad_work)->work) ->(work_completion)(&(&bond->mcast_work)->work) ->(work_completion)(&(&bond->slave_arr_work)->work) ->(&br->hello_timer) ->(&br->topology_change_timer) ->(&br->tcn_timer) ->(&brmctx->ip4_mc_router_timer) ->(&brmctx->ip4_other_query.timer) ->(&brmctx->ip4_other_query.delay_timer) ->(&brmctx->ip4_own_query.timer) ->(&brmctx->ip6_mc_router_timer) ->(&brmctx->ip6_other_query.timer) ->(&brmctx->ip6_other_query.delay_timer) ->(&brmctx->ip6_own_query.timer) ->raw_notifier_lock ->bcm_notifier_lock ->isotp_notifier_lock ->(work_completion)(&port->bc_work) ->(work_completion)(&port->wq) ->(work_completion)(&(&slave->notify_work)->work) ->_xmit_NETROM#2 ->&pmc->lock ->(&mp->timer) ->(work_completion)(&br->mcast_gc_work) ->rcu_state.barrier_mutex ->&caifn->caifdevs.lock ->&net->rules_mod_lock ->(&mrt->ipmr_expire_timer) ->reg_indoor_lock ->stock_lock ->uevent_sock_mutex.wait_lock ->netlbl_unlhsh_lock ->nr_list_lock ->nr_neigh_list_lock ->&bpq_netdev_addr_lock_key ->gdp_mutex.wait_lock ->pcpu_alloc_mutex.wait_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&x->wait#10 ->key#23 ->net_rwsem.wait_lock ->x25_list_lock ->x25_forward_list_lock ->x25_route_list_lock ->(inetaddr_chain).rwsem.wait_lock ->team->team_lock_key#159 ->team->team_lock_key#163 ->team->team_lock_key#165 ->team->team_lock_key#171 ->dev_pm_qos_sysfs_mtx.wait_lock ->team->team_lock_key#172 FD: 60 BD: 197 +.+.: lock ->kernfs_idr_lock ->cgroup_idr_lock ->pidmap_lock ->drm_minor_lock ->&file_private->table_lock ->&q->queue_lock ->&group->inotify_data.idr_lock ->map_idr_lock ->prog_idr_lock FD: 13 BD: 208 +.+.: kernfs_idr_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 101 BD: 199 ++++: &root->kernfs_rwsem ->&root->kernfs_iattr_rwsem ->kernfs_idr_lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->&sem->wait_lock ->&c->lock ->rcu_node_0 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->inode_hash_lock ->fs_reclaim ->mmu_notifier_invalidate_range_start ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#24 ->remove_cache_srcu ->batched_entropy_u8.lock ->kernfs_rename_lock ->&n->list_lock ->&sb->s_type->i_lock_key#30 ->&sb->s_type->i_lock_key#31 ->key ->pcpu_lock ->percpu_counters_lock ->&xa->xa_lock#5 ->stock_lock ->&____s->seqcount#2 ->&p->pi_lock ->&rcu_state.expedited_wq ->&base->lock FD: 1 BD: 4 ++++: file_systems_lock FD: 71 BD: 203 ++++: &root->kernfs_iattr_rwsem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->iattr_mutex ->&sem->wait_lock ->tk_core.seq.seqcount ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: dq_list_lock FD: 5 BD: 45 +.+.: sb_lock ->unnamed_dev_ida.xa_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 101 BD: 1 +.+.: &type->s_umount_key/1 
->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->percpu_counters_lock ->crngs.lock ->&sbinfo->stat_lock ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&dentry->d_lock ->fs_reclaim ->mmu_notifier_invalidate_range_start FD: 26 BD: 39 +.+.: list_lrus_mutex ->&rq->__lock FD: 1 BD: 46 ....: unnamed_dev_ida.xa_lock FD: 1 BD: 19 +.+.: &sbinfo->stat_lock FD: 51 BD: 164 +.+.: &sb->s_type->i_lock_key ->&dentry->d_lock ->&xa->xa_lock#9 ->&p->pi_lock FD: 1 BD: 318 +.+.: &s->s_inode_list_lock FD: 37 BD: 403 +.+.: &dentry->d_lock ->&wq ->&dentry->d_lock/1 ->&obj_hash[i].lock ->pool_lock#2 ->&wq#2 ->&lru->node[i].lock ->sysctl_lock ->&dentry->d_lock/2 ->&p->pi_lock FD: 2 BD: 29 ....: mnt_id_ida.xa_lock ->pool_lock#2 FD: 41 BD: 137 +.+.: mount_lock ->mount_lock.seqcount ->&dentry->d_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 39 BD: 137 +.+.: mount_lock.seqcount ->&new_ns->poll ->&dentry->d_lock ->&obj_hash[i].lock ->pool_lock#2 ->&p->pi_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#2/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&c->lock ->&____s->seqcount ->list_lrus_mutex ->sb_lock ->&sb->s_type->i_lock_key#2 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 150 +.+.: &sb->s_type->i_lock_key#2 ->&dentry->d_lock FD: 1 BD: 4 ..-.: ucounts_lock FD: 39 BD: 154 +.+.: init_fs.lock ->init_fs.seq.seqcount ->&dentry->d_lock FD: 1 BD: 146 +.+.: init_fs.seq.seqcount FD: 2 BD: 102 -.-.: jiffies_lock ->jiffies_seq.seqcount FD: 27 BD: 1 -.-.: log_wait.lock ->&p->pi_lock FD: 100 BD: 1 +.+.: &type->s_umount_key#3/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 51 BD: 278 +.+.: &sb->s_type->i_lock_key#3 ->&dentry->d_lock ->&xa->xa_lock#9 FD: 1 BD: 121 ++++: proc_subdir_lock FD: 93 BD: 1 +.+.: &type->s_umount_key#4/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&sb->s_type->i_lock_key#4 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 39 BD: 2 +.+.: &sb->s_type->i_lock_key#4 ->&dentry->d_lock ->bit_wait_table + i FD: 32 BD: 158 ..-.: cgroup_file_kn_lock ->kernfs_notify_lock ->&obj_hash[i].lock ->&base->lock FD: 34 BD: 148 ..-.: css_set_lock ->cgroup_file_kn_lock ->&p->pi_lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 2 BD: 198 +...: cgroup_idr_lock ->pool_lock#2 FD: 82 BD: 101 +.+.: cpuset_mutex ->callback_lock ->jump_label_mutex ->&p->pi_lock ->&p->alloc_lock ->cpuset_attach_wq.lock FD: 1 BD: 102 ....: callback_lock FD: 76 BD: 16 +.+.: blkcg_pol_mutex ->pcpu_alloc_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock FD: 1 BD: 16 +.+.: devcgroup_mutex FD: 36 BD: 101 +.+.: freezer_mutex ->freezer_lock ->rcu_node_0 ->&rq->__lock ->freezer_mutex.wait_lock ->&rcu_state.expedited_wq ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 46 BD: 63 +.+.: rcu_state.exp_mutex ->rcu_node_0 ->rcu_state.exp_wake_mutex ->&rcu_state.expedited_wq ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&rnp->exp_wq[2] ->&rnp->exp_wq[0] ->&rnp->exp_wq[1] ->&rnp->exp_wq[3] ->&cfs_rq->removed.lock ->pool_lock#2 ->&____s->seqcount ->pool_lock ->rcu_state.exp_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock FD: 38 BD: 109 +.+.: rcu_state.exp_wake_mutex ->rcu_node_0 
->&rnp->exp_lock ->&rnp->exp_wq[0] ->&rnp->exp_wq[1] ->&rnp->exp_wq[2] ->&rnp->exp_wq[3] ->&rq->__lock ->rcu_state.exp_wake_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 134 +.+.: &rnp->exp_lock FD: 27 BD: 136 ....: &rnp->exp_wq[0] ->&p->pi_lock FD: 27 BD: 133 ....: &rnp->exp_wq[1] ->&p->pi_lock FD: 1 BD: 106 ....: init_sighand.siglock FD: 1 BD: 3 +.+.: init_files.file_lock FD: 27 BD: 251 ....: pidmap_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->&p->pi_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 152 BD: 100 ++++: cgroup_threadgroup_rwsem ->css_set_lock ->&p->pi_lock ->tk_core.seq.seqcount ->tasklist_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&c->lock ->&____s->seqcount ->key ->pcpu_lock ->percpu_counters_lock ->&sighand->siglock ->cgroup_threadgroup_rwsem.rss.gp_wait.lock ->&x->wait#2 ->inode_hash_lock ->fs_reclaim ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#30 ->&root->kernfs_iattr_rwsem ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->cpuset_mutex ->freezer_mutex ->&p->alloc_lock ->&____s->seqcount#2 ->cgroup_threadgroup_rwsem.waiters.lock ->stock_lock ->&rcu_state.expedited_wq ->&n->list_lock ->&rnp->exp_wq[0] ->&rnp->exp_lock ->&rnp->exp_wq[3] ->rcu_state.exp_mutex.wait_lock ->freezer_mutex.wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->&base->lock ->(console_sem).lock ->kernfs_pr_cont_lock ->cgroup_file_kn_lock FD: 26 BD: 988 -.-.: &p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock FD: 78 BD: 105 .+.+: tasklist_lock ->init_sighand.siglock ->&sighand->siglock ->&pid->wait_pidfd ->&obj_hash[i].lock ->quarantine_lock ->&base->lock ->stock_lock ->&p->alloc_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->freezer_lock FD: 1 BD: 2811 -.-.: &per_cpu_ptr(group->pcpu, cpu)->seq FD: 1 BD: 1 ....: (kthreadd_done).wait.lock FD: 39 BD: 155 ....: &sighand->siglock ->&sig->wait_chldexit ->input_pool.lock ->&(&sig->stats_lock)->lock ->&p->pi_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->hrtimer_bases.lock ->&obj_hash[i].lock ->&sighand->signalfd_wqh ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&tty->ctrl.lock ->&rq->__lock ->stock_lock ->&n->list_lock ->&____s->seqcount#2 ->&base->lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->quarantine_lock FD: 75 BD: 152 +.+.: &p->alloc_lock ->&____s->seqcount#2 ->init_fs.lock ->&fs->lock ->&x->wait ->cpu_asid_lock ->&x->wait#25 ->&newf->file_lock ->&sighand->siglock ->freezer_lock ->(console_sem).lock ->&p->pi_lock FD: 1 BD: 2879 .-.-: &____s->seqcount#2 FD: 65 BD: 554 +.+.: fs_reclaim ->mmu_notifier_invalidate_range_start ->&mapping->i_mmap_rwsem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->icc_bw_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->stock_lock ->&rcu_state.expedited_wq ->&____s->seqcount FD: 34 BD: 575 +.+.: mmu_notifier_invalidate_range_start ->dma_fence_map ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&____s->seqcount FD: 1 BD: 117 +.+.: kthread_create_lock FD: 27 BD: 171 ....: &x->wait ->&p->pi_lock FD: 38 BD: 1 +.+.: sched_map-wait-type-override ->&pool->lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock 
->&__ctx->lock FD: 27 BD: 622 ....: (worker)->lock ->&p->pi_lock FD: 33 BD: 104 +.+.: wq_pool_attach_mutex ->&p->pi_lock ->&x->wait#7 ->&pool->lock ->&rq->__lock ->wq_pool_attach_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 622 ..-.: wq_mayday_lock FD: 9 BD: 103 ....: &xa->xa_lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->pool_lock#2 FD: 31 BD: 1 +.-.: (&pool->mayday_timer) ->&pool->lock ->&obj_hash[i].lock ->&base->lock FD: 53 BD: 1 +.+.: (wq_completion)rcu_gp ->(work_completion)(&rnp->exp_poll_wq) ->(work_completion)(&(&ssp->srcu_sup->work)->work) ->(work_completion)(&sdp->work) ->(work_completion)(&rew->rew_work) ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->rcu_node_0 FD: 32 BD: 2 +.+.: (work_completion)(&rnp->exp_poll_wq) ->&rnp->exp_poll_lock FD: 4 BD: 1 -.-.: (null) ->tk_core.seq.seqcount FD: 14 BD: 1 +.-.: (&wq_watchdog_timer) ->&obj_hash[i].lock ->&base->lock FD: 574 BD: 1 +.+.: (wq_completion)events_unbound ->(work_completion)(&(&kfence_timer)->work) ->(work_completion)(&entry->work) ->(next_reseed).work ->(stats_flush_dwork).work ->(work_completion)(&sub_info->work) ->deferred_probe_work ->(work_completion)(&barr->work) ->connector_reaper_work ->(reaper_work).work ->(work_completion)(&rdev->wiphy_work) ->(work_completion)(&port->bc_work) ->&rq->__lock ->(work_completion)(&map->work) ->&cfs_rq->removed.lock ->(work_completion)(&ctx->exit_work) ->(work_completion)(&pool->idle_cull_work) FD: 257 BD: 2 +.+.: (work_completion)(&(&kfence_timer)->work) ->cpu_hotplug_lock ->allocation_wait.lock ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 27 BD: 3 -.-.: allocation_wait.lock ->&p->pi_lock FD: 3 BD: 2917 ..-.: batched_entropy_u8.lock ->crngs.lock FD: 1 BD: 2923 -.-.: kfence_freelist_lock FD: 1 BD: 624 -.-.: &meta->lock FD: 48 BD: 1 +.+.: rcu_tasks.tasks_gp_mutex ->&obj_hash[i].lock ->&base->lock ->tasks_rcu_exit_srcu_srcu_usage.lock ->&ACCESS_PRIVATE(sdp, lock) ->tasks_rcu_exit_srcu ->&x->wait#3 ->&rq->__lock ->kernel/rcu/tasks.h:152 ->(&timer.timer) ->rcu_tasks__percpu.cbs_pcpu_lock ->&x->wait#2 ->(console_sem).lock FD: 1 BD: 1 ....: rcu_tasks.cbs_gbl_lock FD: 14 BD: 3 ..-.: rcu_tasks__percpu.cbs_pcpu_lock ->&obj_hash[i].lock ->&base->lock FD: 27 BD: 139 ..-.: &x->wait#2 ->&p->pi_lock FD: 27 BD: 136 ....: &rnp->exp_wq[2] ->&p->pi_lock FD: 31 BD: 6 ....: tasks_rcu_exit_srcu_srcu_usage.lock ->&obj_hash[i].lock FD: 1 BD: 21 ....: &ACCESS_PRIVATE(sdp, lock) FD: 1 BD: 2 ....: tasks_rcu_exit_srcu FD: 27 BD: 21 ....: &x->wait#3 ->&p->pi_lock FD: 256 BD: 1 +.+.: rcu_tasks_trace.tasks_gp_mutex ->cpu_hotplug_lock ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->&x->wait#2 ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->(&timer.timer) ->(console_sem).lock FD: 1 BD: 1 ....: rcu_tasks_trace.cbs_gbl_lock FD: 38 BD: 2 +.+.: (work_completion)(&(&ssp->srcu_sup->work)->work) ->&ssp->srcu_sup->srcu_gp_mutex ->&ssp->srcu_sup->srcu_cb_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->remove_cache_srcu_srcu_usage.lock ->&obj_hash[i].lock ->&base->lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 37 BD: 3 +.+.: &ssp->srcu_sup->srcu_gp_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->&ssp->srcu_sup->srcu_cb_mutex ->remove_cache_srcu_srcu_usage.lock ->&rq->__lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq 
FD: 36 BD: 4 +.+.: &ssp->srcu_sup->srcu_cb_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->remove_cache_srcu_srcu_usage.lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 31 BD: 2 +.+.: (work_completion)(&sdp->work) ->&ACCESS_PRIVATE(sdp, lock) ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 2 ....: kernel/rcu/tasks.h:152 FD: 27 BD: 212 +.-.: (&timer.timer) ->&p->pi_lock FD: 31 BD: 1 ..-.: &(&kfence_timer)->timer FD: 27 BD: 136 ....: &rnp->exp_wq[3] ->&p->pi_lock FD: 14 BD: 100 ..-.: rcu_tasks_trace__percpu.cbs_pcpu_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 99 ....: &ACCESS_PRIVATE(rtpcp, lock) FD: 1 BD: 1 +.+.: (memory_chain).rwsem FD: 68 BD: 101 +.+.: smpboot_threads_lock ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->hrtimer_bases.lock FD: 27 BD: 459 -.-.: &rcu_state.gp_wq ->&p->pi_lock FD: 26 BD: 102 -.-.: &stop_pi_lock ->&rq->__lock FD: 1 BD: 102 -.-.: &stopper->lock FD: 1 BD: 2 +.+.: (module_notify_list).rwsem FD: 1 BD: 1 +.+.: ddebug_lock FD: 2 BD: 1 +.+.: cci_probing ->devtree_lock FD: 1 BD: 1 +.+.: ptlock_ptr(ptdesc) FD: 256 BD: 1 +.+.: watchdog_mutex ->cpu_hotplug_lock FD: 863 BD: 1 +.+.: (wq_completion)events ->(work_completion)(&sscs.work) ->rdist_memreserve_cpuhp_cleanup_work ->(shepherd).work ->(work_completion)(&(&group->avgs_work)->work) ->(work_completion)(&rfkill_global_led_trigger_work) ->timer_update_work ->pcpu_balance_work ->(work_completion)(&p->wq) ->(debug_obj_work).work ->(work_completion)(&helper->damage_work) ->(work_completion)(&rfkill->sync_work) ->(linkwatch_work).work ->(work_completion)(&w->work) ->(work_completion)(&gadget->work) ->kernfs_notify_work ->async_lookup_work ->autoload_work ->(work_completion)(&barr->work) ->drain_vmap_work ->netstamp_work ->reg_work ->(work_completion)(&fw_work->work) ->(work_completion)(&s->destroy_work) ->(work_completion)(&(&krcp->monitor_work)->work) ->(work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) ->(work_completion)(&(&ovs_net->masks_rebalance)->work) ->(work_completion)(&ht->run_work) ->(work_completion)(&aux->work) ->(work_completion)(&w->work)#2 ->(deferred_probe_timeout_work).work ->(regulator_init_complete_work).work ->(work_completion)(&cgrp->bpf.release_work) ->(work_completion)(&w->w) ->(work_completion)(&sbi->s_sb_upd_work) ->deferred_process_work ->(work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) ->(work_completion)(&data->fib_event_work) ->(work_completion)(&(&hwstats->traffic_dw)->work) ->(work_completion)(&rdev->wiphy_work) ->wireless_nlevent_work ->(work_completion)(&(&conn->info_timer)->work) ->free_ipc_work ->(work_completion)(&(&devlink->rwork)->work) ->fqdir_free_work ->&rq->__lock ->(work_completion)(&aux->work)#2 ->(ima_keys_delayed_work).work ->(work_completion)(&ns->work) ->(work_completion)(&nlk->work) ->(work_completion)(&vmpr->work) ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((&memcg_stock) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&memcg_stock))) *)((&memcg_stock))); (typeof((typeof(*((&memcg_stock))) *)((&memcg_stock)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&mm->async_put_work) FD: 17 BD: 2 +.+.: (work_completion)(&sscs.work) ->&x->wait#4 ->&obj_hash[i].lock 
->hrtimer_bases.lock ->&x->wait#5 FD: 1 BD: 102 -.-.: &x->wait#4 FD: 1 BD: 100 ....: &x->wait#5 FD: 2 BD: 153 +.+.: &newf->file_lock ->&newf->resize_wait FD: 1 BD: 1 ....: &p->vtime.seqcount FD: 40 BD: 99 +.+.: mem_hotplug_lock ->mem_hotplug_lock.rss.gp_wait.lock FD: 3 BD: 100 ..-.: mem_hotplug_lock.rss.gp_wait.lock ->&obj_hash[i].lock FD: 1 BD: 99 ....: mem_hotplug_lock.waiters.lock FD: 257 BD: 7 +.+.: cpu_add_remove_lock ->cpu_hotplug_lock ->cpu_hotplug_lock.waiters.lock ->cpu_hotplug_lock.rss.gp_wait.lock ->cpuset_hotplug_work ->&rq->__lock FD: 3 BD: 99 ..-.: cpu_hotplug_lock.rss.gp_wait.lock ->&obj_hash[i].lock FD: 27 BD: 99 ....: cpu_hotplug_lock.waiters.lock ->&p->pi_lock FD: 1 BD: 8 +.+.: cpuset_hotplug_work FD: 1 BD: 1 ....: rcu_callback FD: 1 BD: 100 +.+.: pcp_batch_high_lock FD: 27 BD: 106 ....: (cpu_running).wait.lock ->&p->pi_lock FD: 27 BD: 106 ....: &x->wait#6 ->&p->pi_lock FD: 1 BD: 2811 -.-.: &cfs_rq->removed.lock FD: 1 BD: 105 ....: &x->wait#7 FD: 16 BD: 2811 -...: &rt_b->rt_runtime_lock ->&rt_rq->rt_runtime_lock ->tk_core.seq.seqcount ->hrtimer_bases.lock FD: 1 BD: 2812 -...: &rt_rq->rt_runtime_lock FD: 27 BD: 618 -.-.: &rcu_state.expedited_wq ->&p->pi_lock FD: 30 BD: 99 +.+.: stop_cpus_mutex ->&stopper->lock ->&stop_pi_lock ->&rq->__lock ->&x->wait#8 FD: 27 BD: 101 ....: &x->wait#8 ->&p->pi_lock FD: 256 BD: 2 +.+.: rdist_memreserve_cpuhp_cleanup_work ->cpu_hotplug_lock FD: 76 BD: 1 +.+.: sched_domains_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->pcpu_lock FD: 1 BD: 2811 ....: &cp->lock FD: 93 BD: 1 +.+.: &type->s_umount_key#5/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&obj_hash[i].lock ->percpu_counters_lock ->crngs.lock ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&dentry->d_lock FD: 38 BD: 152 +.+.: &sb->s_type->i_lock_key#5 ->&dentry->d_lock FD: 27 BD: 1 ....: (setup_done).wait.lock ->&p->pi_lock FD: 92 BD: 26 ++++: namespace_sem ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->mnt_id_ida.xa_lock ->pcpu_alloc_mutex ->&dentry->d_lock ->mount_lock ->rename_lock ->&obj_hash[i].lock ->&rq->__lock ->namespace_sem.wait_lock ->&____s->seqcount#2 ->&n->list_lock ->stock_lock ->&cfs_rq->removed.lock ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu FD: 1 BD: 146 +.+.: &____s->seqcount#3 FD: 77 BD: 1 +.+.: &type->s_umount_key#6 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->&lru->node[i].lock ->&sbinfo->stat_lock ->&obj_hash[i].lock FD: 27 BD: 653 +.+.: &lru->node[i].lock FD: 110 BD: 7 ++++: &sb->s_type->i_mutex_key ->namespace_sem ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->tomoyo_ss ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#2 ->&wb->list_lock ->&c->lock ->&____s->seqcount ->&rq->__lock FD: 39 BD: 24 +.+.: rename_lock ->rename_lock.seqcount ->&dentry->d_lock FD: 38 BD: 153 +.+.: rename_lock.seqcount ->&dentry->d_lock ->&dentry->d_lock/2 FD: 1 BD: 138 ....: &new_ns->poll FD: 39 BD: 146 +.+.: &fs->lock ->&____s->seqcount#3 ->&dentry->d_lock FD: 1 BD: 90 +.+.: req_lock FD: 126 BD: 1 +.+.: of_mutex ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem FD: 1 BD: 164 ....: &x->wait#9 FD: 1 BD: 192 +.+.: &k->list_lock FD: 26 BD: 167 ++++: bus_type_sem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 
FD: 33 BD: 300 -...: &dev->power.lock ->&dev->power.wait_queue ->hrtimer_bases.lock ->&dev->power.lock/1 FD: 35 BD: 167 +.+.: dpm_list_mtx ->&dev->devres_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->dpm_list_mtx.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 78 BD: 172 +.+.: uevent_sock_mutex ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->rlock-AF_NETLINK ->&n->list_lock ->quarantine_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->uevent_sock_mutex.wait_lock ->&____s->seqcount#2 ->mmu_notifier_invalidate_range_start ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&base->lock FD: 1 BD: 147 ....: running_helpers_waitq.lock FD: 1 BD: 179 +.+.: sysfs_symlink_target_lock FD: 2 BD: 235 +.+.: &k->k_lock ->klist_remove_lock FD: 1 BD: 1 ....: &dev->mutex FD: 1 BD: 1 +.+.: subsys mutex FD: 2 BD: 1 +.+.: memory_blocks.xa_lock ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#2 FD: 1 BD: 1 +.+.: subsys mutex#3 FD: 74 BD: 95 +.+.: dev_pm_qos_mtx ->fs_reclaim ->pool_lock#2 ->&dev->power.lock ->pm_qos_lock ->&c->lock ->&____s->seqcount ->&rq->__lock FD: 1 BD: 96 ....: pm_qos_lock FD: 129 BD: 93 +.+.: dev_pm_qos_sysfs_mtx ->dev_pm_qos_mtx ->&root->kernfs_rwsem ->fs_reclaim ->pool_lock#2 ->lock ->&c->lock ->&____s->seqcount ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock ->dev_pm_qos_sysfs_mtx.wait_lock ->rcu_node_0 FD: 68 BD: 13 +.+.: register_lock ->proc_subdir_lock ->fs_reclaim ->pool_lock#2 ->proc_inum_ida.xa_lock ->&c->lock ->&____s->seqcount ->&rq->__lock FD: 1 BD: 1 +.+.: (cpufreq_policy_notifier_list).rwsem FD: 721 BD: 4 ++++: (pm_chain_head).rwsem ->tk_core.seq.seqcount ->input_pool.lock ->cpu_add_remove_lock ->thermal_list_lock ->fw_lock ->&obj_hash[i].lock ->(work_completion)(&(&fw_cache.work)->work) ->async_lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->rcu_state.barrier_mutex ->&base->lock ->rcu_state.barrier_mutex.wait_lock ->&rq->__lock ->rcu_node_0 ->base_crng.lock ->(console_sem).lock ->console_owner_lock ->console_owner ->(pm_chain_head).rwsem.wait_lock ->&cfs_rq->removed.lock ->pool_lock#2 ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 1 +.+.: cpufreq_governor_mutex FD: 41 BD: 2 +.+.: (work_completion)(&rew->rew_work) ->rcu_node_0 ->rcu_state.exp_wake_mutex ->&rcu_state.expedited_wq ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->pool_lock#2 ->pool_lock ->&cfs_rq->removed.lock ->rcu_state.exp_wake_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 2 ++++: binfmt_lock FD: 1 BD: 79 +.+.: pin_fs_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#7/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&____s->seqcount ->&c->lock ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#6 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 3 +.+.: &sb->s_type->i_lock_key#6 ->&dentry->d_lock FD: 79 BD: 1 +.+.: &sb->s_type->i_mutex_key#2 ->&sb->s_type->i_lock_key#6 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount FD: 27 BD: 406 ....: &wq ->&p->pi_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#8/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock 
->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#7 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 152 +.+.: &sb->s_type->i_lock_key#7 ->&dentry->d_lock ->&p->pi_lock FD: 113 BD: 75 +.+.: &sb->s_type->i_mutex_key#3 ->&sb->s_type->i_lock_key#7 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&n->list_lock ->rcu_node_0 ->pin_fs_lock ->mount_lock ->&fsnotify_mark_srcu ->&xa->xa_lock#9 ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->remove_cache_srcu ->&rcu_state.gp_wq ->&mapping->i_mmap_rwsem ->&wb->list_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock FD: 1 BD: 22 +.+.: chrdevs_lock FD: 760 BD: 2 ++++: cb_lock ->genl_mutex ->fs_reclaim ->&c->lock ->pool_lock#2 ->rlock-AF_NETLINK ->rtnl_mutex ->&obj_hash[i].lock ->&____s->seqcount ->&n->list_lock ->&rdev->wiphy.mtx ->nlk_cb_mutex-GENERIC ->&xa->xa_lock#16 ->genl_mutex.wait_lock ->&p->pi_lock ->rtnl_mutex.wait_lock ->&rq->__lock ->&lock->wait_lock ->&____s->seqcount#2 ->rcu_node_0 ->quarantine_lock ->remove_cache_srcu ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&dir->lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->(console_sem).lock ->console_owner_lock ->console_owner FD: 747 BD: 4 +.+.: genl_mutex ->fs_reclaim ->pool_lock#2 ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->rlock-AF_NETLINK ->&obj_hash[i].lock ->&____s->seqcount ->&n->list_lock ->&zone->lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->genl_mutex.wait_lock ->hwsim_radio_lock ->&x->wait#9 ->batched_entropy_u32.lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->subsys mutex#53 ->device_links_lock ->&k->k_lock ->deferred_probe_mutex ->pcpu_alloc_mutex ->cpu_hotplug_lock ->wq_pool_mutex ->crngs.lock ->triggers_list_lock ->leds_list_lock ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->(inetaddr_chain).rwsem ->inet6addr_chain.lock ->&____s->seqcount#2 ->quarantine_lock ->remove_cache_srcu ->rcu_node_0 ->&pernet->lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->&base->lock ->&meta->lock ->uevent_sock_mutex.wait_lock ->&sem->wait_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->nbd_index_mutex ->&fn->fou_lock ->&nbd->config_lock ->&x->wait#2 ->&lock->wait_lock FD: 1 BD: 4 +.+.: subsys mutex#4 FD: 4 BD: 9 ....: async_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 3 +.+.: regulator_list_mutex FD: 437 BD: 2 +.+.: (work_completion)(&entry->work) ->tk_core.seq.seqcount ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&____s->seqcount ->&k->list_lock ->lock ->&root->kernfs_rwsem ->&c->lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->async_lock ->async_done.lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key ->sb_writers#2 ->&sb->s_type->i_lock_key#2 ->&rq->__lock FD: 26 BD: 19 .+.+: device_links_srcu ->&rq->__lock FD: 28 BD: 18 +.+.: fwnode_link_lock ->&k->k_lock ->&rq->__lock FD: 29 BD: 95 +.+.: device_links_lock ->&k->list_lock ->&k->k_lock 
->&rq->__lock FD: 1 BD: 169 ....: &dev->devres_lock FD: 26 BD: 18 +.+.: pinctrl_list_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 1 BD: 18 +.+.: pinctrl_maps_mutex FD: 1 BD: 4 +.+.: regulator_nesting_mutex FD: 2 BD: 1 +.+.: regulator_ww_class_mutex ->regulator_nesting_mutex FD: 130 BD: 95 +.+.: gdp_mutex ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->kobj_ns_type_lock ->sysfs_symlink_target_lock ->&sem->wait_lock ->&p->pi_lock ->kernfs_idr_lock ->&obj_hash[i].lock ->&____s->seqcount#2 ->&rq->__lock ->remove_cache_srcu ->gdp_mutex.wait_lock ->&n->list_lock ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (reboot_notifier_list).rwsem FD: 3 BD: 3 +.+.: subsys mutex#5 ->&k->k_lock FD: 26 BD: 96 +.+.: deferred_probe_mutex ->&rq->__lock ->&cfs_rq->removed.lock FD: 27 BD: 18 ....: probe_waitqueue.lock ->&p->pi_lock FD: 27 BD: 3 ....: async_done.lock ->&p->pi_lock FD: 93 BD: 1 +.+.: &type->s_umount_key#9/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 39 BD: 56 +.+.: &sb->s_type->i_lock_key#8 ->&dentry->d_lock ->&p->pi_lock ->bit_wait_table + i FD: 39 BD: 7 +.+.: vmap_purge_lock ->purge_vmap_area_lock ->free_vmap_area_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->vmap_purge_lock.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 4 +.+.: &fp->aux->used_maps_mutex FD: 1 BD: 1 +.+.: proto_list_mutex FD: 1 BD: 1 +.+.: targets_mutex FD: 29 BD: 277 ...-: nl_table_lock ->pool_lock#2 ->nl_table_wait.lock ->&obj_hash[i].lock ->&meta->lock ->kfence_freelist_lock FD: 27 BD: 278 ..-.: nl_table_wait.lock ->&p->pi_lock FD: 1 BD: 1 +.+.: net_family_lock FD: 8 BD: 5 ....: net_generic_ids.xa_lock ->&c->lock ->&____s->seqcount ->pool_lock#2 FD: 6 BD: 66 ..-.: &dir->lock ->&obj_hash[i].lock ->&____s->seqcount ->pool_lock#2 ->quarantine_lock FD: 36 BD: 5 +.+.: k-sk_lock-AF_NETLINK ->k-slock-AF_NETLINK ->&rq->__lock FD: 1 BD: 6 +...: k-slock-AF_NETLINK FD: 2 BD: 247 ..-.: rhashtable_bucket ->rhashtable_bucket/1 FD: 1 BD: 54 ....: &cma->lock FD: 46 BD: 54 +.+.: cma_mutex ->&zone->lock ->&obj_hash[i].lock ->lock#2 FD: 28 BD: 1 +.+.: pcpu_drain_mutex ->&pcp->lock FD: 40 BD: 56 +.+.: lock#2 ->&obj_hash[i].lock ->(work_completion)(work) ->&x->wait#10 ->&rq->__lock FD: 1 BD: 1 +.+.: &pool->lock#2 FD: 1 BD: 156 ....: freezer_lock FD: 1 BD: 1 ....: audit_backlog_wait.lock FD: 1 BD: 1 ....: &list->lock FD: 27 BD: 1 ....: kauditd_wait.lock ->&p->pi_lock FD: 1 BD: 1 ....: printk_ratelimit_state.lock FD: 3 BD: 2 +.+.: lock#3 ->&zone->lock FD: 70 BD: 1 +.+.: khugepaged_mutex ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->lock#3 ->pcp_batch_high_lock FD: 163 BD: 9 ++++: &(&priv->bus_notifier)->rwsem ->&device->physical_node_lock ->iommu_probe_device_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->i2c_dev_list_lock ->&x->wait#9 ->&obj_hash[i].lock ->chrdevs_lock ->&k->list_lock ->gdp_mutex ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#61 ->mmu_notifier_invalidate_range_start ->&n->list_lock ->&dum_hcd->dum->lock ->&x->wait#19 ->&base->lock ->(&timer.timer) 
->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->remove_cache_srcu ->&sem->wait_lock ->&____s->seqcount#2 FD: 3 BD: 6 +.+.: subsys mutex#6 ->&k->k_lock FD: 4 BD: 1 +.+.: subsys mutex#7 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 +.+.: regmap_debugfs_early_lock FD: 1 BD: 1 +.+.: (acpi_reconfig_chain).rwsem FD: 1 BD: 1 +.+.: __i2c_board_lock FD: 1 BD: 598 -.-.: quarantine_lock FD: 69 BD: 1 +.+.: core_lock ->&k->list_lock ->&k->k_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 36 BD: 421 .+.+: remove_cache_srcu ->quarantine_lock ->&c->lock ->&n->list_lock ->&obj_hash[i].lock ->&rq->__lock ->pool_lock#2 ->pool_lock ->&____s->seqcount ->rcu_node_0 ->&base->lock ->&cfs_rq->removed.lock ->&meta->lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 27 BD: 1 +.+.: thermal_governor_lock ->thermal_list_lock FD: 26 BD: 6 +.+.: thermal_list_lock ->&rq->__lock FD: 34 BD: 1 +.+.: cpuidle_lock ->&obj_hash[i].lock ->(console_sem).lock FD: 69 BD: 1 +.+.: k-sk_lock-AF_QIPCRTR ->k-slock-AF_QIPCRTR ->fs_reclaim ->qrtr_ports.xa_lock ->pool_lock#2 ->qrtr_node_lock ->&obj_hash[i].lock FD: 1 BD: 2 +...: k-slock-AF_QIPCRTR FD: 1 BD: 2 +.+.: qrtr_ports.xa_lock FD: 1 BD: 2 +.+.: qrtr_node_lock FD: 67 BD: 107 ++++: (crypto_chain).rwsem ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->&n->list_lock ->quarantine_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->remove_cache_srcu FD: 329 BD: 2 +.+.: tty_mutex ->(console_sem).lock ->console_lock ->fs_reclaim ->pool_lock#2 ->tty_ldiscs_lock ->&obj_hash[i].lock ->&k->list_lock ->&k->k_lock ->&tty->legacy_mutex FD: 257 BD: 1 +.+.: iova_cache_mutex ->cpu_hotplug_lock ->slab_mutex FD: 3 BD: 1 +.+.: subsys mutex#8 ->&k->k_lock FD: 1 BD: 1 ..-.: uidhash_lock FD: 35 BD: 2 +.+.: (work_completion)(&s->destroy_work) ->&obj_hash[i].lock ->pool_lock#2 ->&rsp->gp_wait ->pcpu_lock ->quarantine_lock ->&rq->__lock FD: 69 BD: 1 +.+.: (work_completion)(&eval_map_work) ->trace_event_sem FD: 1 BD: 1 ....: oom_reaper_wait.lock FD: 1 BD: 1 +.+.: subsys mutex#9 FD: 1 BD: 1 ....: &pgdat->kcompactd_wait FD: 1 BD: 1 ....: hugetlb_lock FD: 145 BD: 1 +.+.: memory_tier_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#10 FD: 1 BD: 2 +.+.: subsys mutex#10 FD: 1 BD: 1 +.+.: ksm_thread_mutex FD: 1 BD: 1 ....: ksm_thread_wait.lock FD: 1 BD: 2 +.+.: damon_ops_lock FD: 68 BD: 106 ++++: crypto_alg_sem ->(crypto_chain).rwsem ->&rq->__lock FD: 1 BD: 1 +.+.: khugepaged_mm_lock FD: 1 BD: 1 ....: khugepaged_wait.lock FD: 135 BD: 8 +.+.: bio_slab_lock ->fs_reclaim ->pool_lock#2 ->slab_mutex ->bio_slabs.xa_lock FD: 2 BD: 9 +.+.: bio_slabs.xa_lock ->pool_lock#2 FD: 67 BD: 1 +.+.: major_names_lock ->fs_reclaim ->pool_lock#2 ->major_names_spinlock ->&c->lock ->&____s->seqcount FD: 1 BD: 2 +.+.: major_names_spinlock FD: 16 BD: 1 +.-.: (&rtpcp->lazy_timer) ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->rcu_tasks__percpu.cbs_pcpu_lock FD: 68 BD: 1 +.+.: &pgdat->kswapd_lock ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock FD: 31 BD: 1 ..-.: 
drivers/char/random.c:251 FD: 15 BD: 2 +.+.: (next_reseed).work ->&obj_hash[i].lock ->&base->lock ->input_pool.lock ->base_crng.lock FD: 31 BD: 1 ..-.: mm/vmstat.c:2022 FD: 256 BD: 2 +.+.: (shepherd).work ->cpu_hotplug_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 36 BD: 57 +.+.: (wq_completion)mm_percpu_wq ->(work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) ->(work_completion)(work) ->(work_completion)(&barr->work) ->&rq->__lock FD: 28 BD: 58 +.+.: (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) ->&obj_hash[i].lock ->&base->lock ->&pcp->lock ->&rq->__lock FD: 3 BD: 6 +.+.: subsys mutex#11 ->&k->k_lock FD: 1 BD: 1 -...: shrink_qlist.lock FD: 31 BD: 5 ....: remove_cache_srcu_srcu_usage.lock ->&obj_hash[i].lock FD: 34 BD: 99 +.+.: flush_lock ->&obj_hash[i].lock ->(work_completion)(&sfw->work) ->&x->wait#10 ->&rq->__lock FD: 9 BD: 101 +.+.: (work_completion)(&sfw->work) ->&c->lock ->&n->list_lock ->&obj_hash[i].lock FD: 30 BD: 100 +.+.: (wq_completion)slub_flushwq ->(work_completion)(&sfw->work) ->(work_completion)(&barr->work) FD: 27 BD: 626 ....: &x->wait#10 ->&p->pi_lock FD: 28 BD: 105 +.+.: (work_completion)(&barr->work) ->&x->wait#10 ->&rq->__lock FD: 31 BD: 1 ..-.: &(&ssp->srcu_sup->work)->timer FD: 1 BD: 1 +.+.: prepare_lock FD: 114 BD: 1 +.+.: clk_debug_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 FD: 1 BD: 1 +.+.: clocks_mutex FD: 387 BD: 1 +.+.: acpi_scan_lock ->semaphore->lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#9 ->acpi_device_lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->subsys mutex#12 ->uevent_sock_mutex ->running_helpers_waitq.lock ->*(&acpi_gbl_reference_count_lock) ->&n->list_lock ->acpi_ioremap_lock ->quarantine_lock ->&device->physical_node_lock ->irq_domain_mutex ->&domain->mutex ->resource_lock ->&(&priv->bus_notifier)->rwsem ->fwnode_link_lock ->device_links_srcu ->acpi_pm_notifier_install_lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->subsys mutex#4 ->(console_sem).lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->io_range_mutex ->pci_bus_sem ->gdp_mutex ->subsys mutex#17 ->acpi_hp_context_lock ->bridge_mutex ->pci_lock ->pci_acpi_companion_lookup_sem ->pci_slot_mutex ->resource_alignment_lock ->iort_msi_chip_lock ->subsys mutex#18 ->devtree_lock ->pci_rescan_remove_lock ->acpi_link_lock ->acpi_dep_list_lock ->power_resource_list_lock FD: 68 BD: 2 +.+.: acpi_device_lock ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#2 ->semaphore->lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount FD: 1 BD: 3 ....: &xa->xa_lock#2 FD: 1 BD: 2 +.+.: subsys mutex#12 FD: 127 BD: 29 +.+.: &device->physical_node_lock ->sysfs_symlink_target_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&rq->__lock ->&cfs_rq->removed.lock FD: 150 BD: 2 +.+.: 
acpi_pm_notifier_install_lock ->semaphore->lock ->fs_reclaim ->pool_lock#2 ->*(&acpi_gbl_reference_count_lock) ->acpi_pm_notifier_lock ->&c->lock ->&____s->seqcount FD: 147 BD: 3 +.+.: acpi_pm_notifier_lock ->fs_reclaim ->pool_lock#2 ->wakeup_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#13 ->events_lock FD: 1 BD: 5 ....: wakeup_ida.xa_lock FD: 3 BD: 5 +.+.: subsys mutex#13 ->&k->k_lock FD: 1 BD: 5 ....: events_lock FD: 1 BD: 2 +.+.: acpi_wakeup_lock FD: 185 BD: 2 +.+.: port_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#14 ->&xa->xa_lock#3 ->&port->mutex FD: 1 BD: 3 +.+.: subsys mutex#14 FD: 1 BD: 3 ....: &xa->xa_lock#3 FD: 1 BD: 301 ....: &dev->power.wait_queue FD: 96 BD: 1 +.+.: (wq_completion)pm ->(work_completion)(&dev->power.work) ->(work_completion)(&hcd->wakeup_work) FD: 90 BD: 2 +.+.: (work_completion)(&dev->power.work) ->&dev->power.lock ->&hub->irq_urb_lock ->(&hub->irq_urb_retry) ->&obj_hash[i].lock ->&base->lock ->hcd_urb_unlink_lock ->hcd_root_hub_lock ->&rq->__lock ->(work_completion)(&hub->tt.clear_work) ->&dum_hcd->dum->lock ->device_state_lock ->hcd_urb_list_lock ->usb_kill_urb_queue.lock ->&vhci_hcd->vhci->lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->fs_reclaim ->&x->wait#19 ->(&timer.timer) ->&c->lock ->&____s->seqcount ->&port_lock_key FD: 177 BD: 9 +.+.: &port->mutex ->fs_reclaim ->pool_lock#2 ->(console_sem).lock ->&port_lock_key ->console_mutex ->ctrl_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&____s->seqcount ->&dev->power.lock ->&k->list_lock ->&c->lock ->lock ->&root->kernfs_rwsem ->&device->physical_node_lock ->semaphore->lock ->sysfs_symlink_target_lock ->&k->k_lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#15 ->*(&acpi_gbl_reference_count_lock) ->&n->list_lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->gdp_mutex ->bus_type_sem ->&rq->__lock ->req_lock ->&p->pi_lock ->&x->wait#11 ->subsys mutex#16 ->chrdevs_lock ->&cfs_rq->removed.lock ->&desc->request_mutex ->register_lock ->&irq_desc_lock_class ->proc_subdir_lock ->proc_inum_ida.xa_lock FD: 36 BD: 283 ..-.: &port_lock_key ->&dev->power.lock ->&port->lock ->&tty->write_wait FD: 26 BD: 11 +.+.: syslog_lock ->&rq->__lock FD: 38 BD: 279 ..-.: console_owner ->&port_lock_key ->console_owner_lock FD: 31 BD: 1 ..-.: &(&group->avgs_work)->timer FD: 31 BD: 1 ..-.: &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer FD: 31 BD: 1 ..-.: mm/memcontrol.c:679 FD: 27 BD: 2 +.+.: (stats_flush_dwork).work ->cgroup_rstat_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 2 BD: 108 ....: cgroup_rstat_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 2828 -.-.: per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 28 BD: 2 +.+.: (work_completion)(&(&group->avgs_work)->work) 
->&group->avgs_lock ->&rq->__lock FD: 27 BD: 3 +.+.: &group->avgs_lock ->&per_cpu_ptr(group->pcpu, cpu)->seq ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 FD: 1 BD: 10 ....: ctrl_ida.xa_lock FD: 1 BD: 10 +.+.: subsys mutex#15 FD: 1 BD: 236 +.+.: klist_remove_lock FD: 27 BD: 90 ....: &x->wait#11 ->&p->pi_lock FD: 122 BD: 1 .+.+: sb_writers ->mount_lock ->&type->i_mutex_dir_key/1 ->&sb->s_type->i_mutex_key#4 ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#5 ->&wb->list_lock ->&type->i_mutex_dir_key#2 ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&s->s_inode_list_lock ->&obj_hash[i].lock ->&sbinfo->stat_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->pool_lock#2 FD: 113 BD: 2 +.+.: &type->i_mutex_dir_key/1 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&obj_hash[i].lock ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&c->lock ->&____s->seqcount ->&xattrs->lock ->&simple_offset_xa_lock ->smack_known_lock ->&sb->s_type->i_mutex_key#4 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->tomoyo_ss ->&u->bindlock ->&n->list_lock ->&sem->wait_lock ->&rq->__lock ->remove_cache_srcu ->rcu_node_0 ->&fsnotify_mark_srcu ->&xa->xa_lock#9 ->&type->i_mutex_dir_key#2 ->&____s->seqcount#2 ->batched_entropy_u8.lock FD: 1 BD: 11 ++++: &xattrs->lock FD: 13 BD: 11 +.+.: &simple_offset_xa_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 FD: 36 BD: 119 +.+.: smack_known_lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&rq->__lock ->smack_known_lock.wait_lock ->&base->lock ->&cfs_rq->removed.lock ->quarantine_lock ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->&meta->lock ->kfence_freelist_lock ->stock_lock ->&rcu_state.expedited_wq FD: 30 BD: 3 +.+.: &sb->s_type->i_mutex_key#4 ->tk_core.seq.seqcount ->tomoyo_ss ->&xattrs->lock ->&rq->__lock ->&simple_offset_xa_lock ->&dentry->d_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 3 BD: 10 +.+.: subsys mutex#16 ->&k->k_lock FD: 1 BD: 2 +.+.: io_range_mutex FD: 1 BD: 2 ++++: pci_bus_sem FD: 3 BD: 2 +.+.: subsys mutex#17 ->&k->k_lock FD: 66 BD: 2 +.+.: acpi_hp_context_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 2 +.+.: bridge_mutex FD: 1 BD: 13 ....: pci_lock FD: 1 BD: 2 .+.+: pci_acpi_companion_lookup_sem FD: 1 BD: 2 +.+.: pci_slot_mutex FD: 1 BD: 2 +.+.: resource_alignment_lock FD: 1 BD: 301 ....: &dev->power.lock/1 FD: 1 BD: 2 +.+.: subsys mutex#18 FD: 37 BD: 2 +.+.: pci_rescan_remove_lock FD: 81 BD: 4 +.+.: acpi_link_lock ->fs_reclaim ->pool_lock#2 ->semaphore->lock ->&obj_hash[i].lock ->*(&acpi_gbl_reference_count_lock) ->(console_sem).lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 2 +.+.: acpi_dep_list_lock FD: 1 BD: 2 +.+.: power_resource_list_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#10/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#9 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#9 ->&dentry->d_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#11/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#10 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock 
->&____s->seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#10 ->&dentry->d_lock FD: 210 BD: 100 ++++: &mm->mmap_lock ->reservation_ww_class_acquire ->fs_reclaim ->pool_lock#2 ->&vma->vm_lock->lock ->&____s->seqcount ->&mm->page_table_lock ->&c->lock ->ptlock_ptr(ptdesc)#2 ->&anon_vma->rwsem ->mmu_notifier_invalidate_range_start ->lock#4 ->lock#5 ->&obj_hash[i].lock ->&mapping->i_mmap_rwsem ->&p->alloc_lock ->&lruvec->lru_lock ->tk_core.seq.seqcount ->&rq->__lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&mm->mmap_lock/1 ->rcu_node_0 ->&n->list_lock ->remove_cache_srcu ->pool_lock ->quarantine_lock ->&sem->wait_lock ->&p->pi_lock ->&folio_wait_table[i] ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rcu_state.expedited_wq ->&sb->s_type->i_lock_key ->&kcov->lock ->stock_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->&base->lock ->&xa->xa_lock#9 ->&info->lock ->&xa->xa_lock#5 ->&s->s_inode_list_lock ->batched_entropy_u32.lock ->&((cluster_info + ci)->lock)#2 ->&xa->xa_lock#23 ->&ctrl->lock#2 ->&tree->lock ->&acomp_ctx->mutex ->&cache->free_lock ->cgroup_file_kn_lock ->&vmpr->sr_lock ->&mapping->i_private_lock ->cgroup_rstat_lock ->memcg_oom_lock ->&rtpn->lock ->&cache->alloc_lock FD: 94 BD: 111 +.+.: reservation_ww_class_acquire ->reservation_ww_class_mutex FD: 93 BD: 112 +.+.: reservation_ww_class_mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->&rq->__lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock FD: 63 BD: 556 ++++: &mapping->i_mmap_rwsem ->&obj_hash[i].lock ->pool_lock#2 ->&anon_vma->rwsem ->&sem->wait_lock ->&rq->__lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->quarantine_lock ->&p->pi_lock ->ptlock_ptr(ptdesc)#2 ->mmu_notifier_invalidate_range_start FD: 1 BD: 576 +.+.: dma_fence_map FD: 1 BD: 533 ....: key FD: 1 BD: 1 +.+.: attribute_container_mutex FD: 26 BD: 15 +.+.: triggers_list_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 26 BD: 15 .+.+: leds_list_lock ->&rq->__lock FD: 165 BD: 4 ++++: (usb_notifier_list).rwsem ->fs_reclaim ->pool_lock#2 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#57 ->mon_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->usbfs_mutex FD: 1 BD: 1 +.+.: rc_map_lock FD: 1 BD: 1 +.+.: subsys mutex#19 FD: 2 BD: 10 +.+.: iommu_probe_device_lock ->iommu_device_lock FD: 1 BD: 11 +.+.: iommu_device_lock FD: 1 BD: 7 ....: (efi_runtime_lock).lock FD: 27 BD: 9 ....: &x->wait#12 ->&p->pi_lock FD: 30 BD: 1 +.+.: (wq_completion)efi_rts_wq ->(work_completion)(&efi_rts_work.work) FD: 29 BD: 2 +.+.: (work_completion)(&efi_rts_work.work) ->cpu_asid_lock ->efi_rt_lock ->&x->wait#12 ->&rq->__lock FD: 1 BD: 2811 ....: cpu_asid_lock FD: 1 BD: 3 +.+.: efi_rt_lock FD: 1 BD: 6 ....: (efivars_lock).lock FD: 1 BD: 1 +.+.: devfreq_list_lock FD: 1 BD: 2 +.+.: &entry->access FD: 68 BD: 2 +.+.: info_mutex ->proc_subdir_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->proc_inum_ida.xa_lock FD: 1 BD: 96 +.+.: 
kobj_ns_type_lock FD: 10 BD: 62 +.+.: &xa->xa_lock#4 ->pool_lock#2 ->&c->lock ->&n->list_lock ->&obj_hash[i].lock FD: 28 BD: 56 +.+.: subsys mutex#20 ->&k->k_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 16 BD: 166 ..-.: &dir->lock#2 ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 36 BD: 62 +.+.: dev_hotplug_mutex ->&dev->power.lock ->&k->k_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 11 BD: 61 ++++: dev_base_lock ->&xa->xa_lock#4 FD: 1 BD: 1 +.+.: qdisc_mod_lock FD: 18 BD: 1 ++++: bt_proto_lock ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->chan_list_lock ->l2cap_sk_list.lock ->&sk->sk_peer_lock ->hci_sk_list.lock ->&c->lock ->&n->list_lock FD: 82 BD: 22 +.+.: hci_cb_list_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->chan_list_lock ->&conn->ident_lock ->&base->lock ->&list->lock#8 ->&conn->chan_lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->(work_completion)(&(&conn->id_addr_timer)->work) ->&x->wait#2 ->(work_completion)(&(&conn->info_timer)->work) ->hci_cb_list_lock.wait_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->rcu_node_0 ->&n->list_lock ->&rnp->exp_lock ->&rnp->exp_wq[2] ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->quarantine_lock ->pool_lock FD: 1 BD: 1 +.+.: mgmt_chan_list_lock FD: 1 BD: 75 ....: &list->lock#2 FD: 66 BD: 54 +.+.: rate_ctrl_mutex ->fs_reclaim ->pool_lock#2 FD: 2 BD: 1 +.+.: netlbl_domhsh_lock ->pool_lock#2 FD: 1 BD: 52 +.+.: netlbl_unlhsh_lock FD: 769 BD: 1 +.+.: misc_mtx ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&____s->seqcount ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#21 ->misc_minors_ida.xa_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&base->lock ->&dir->lock ->rfkill_global_mutex ->system_transition_mutex ->misc_mtx.wait_lock ->&____s->seqcount#2 ->nfc_index_ida.xa_lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&x->wait ->wq_pool_mutex ->&n->list_lock ->nfc_devlist_mutex ->&k->k_lock ->llcp_devices_lock ->nl_table_lock ->nl_table_wait.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->remove_cache_srcu ->hrtimer_bases.lock ->pool_lock ->nfc_devlist_mutex.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&meta->lock ->quarantine_lock ->wq_pool_mutex.wait_lock ->(console_sem).lock ->&wq->mutex ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->wq_mayday_lock ->(wq_completion)nfc3_nci_rx_wq#185 ->(wq_completion)nfc3_nci_cmd_wq#188 ->&x->wait#2 ->(wq_completion)nfc3_nci_cmd_wq#197 ->pcpu_alloc_mutex.wait_lock ->(wq_completion)nfc4_nci_cmd_wq#50 ->(wq_completion)nfc5_nci_cmd_wq#20 ->(wq_completion)nfc3_nci_rx_wq#204 ->(wq_completion)nfc3_nci_cmd_wq#208 ->(wq_completion)nfc3_nci_rx_wq#220 ->(wq_completion)nfc3_nci_cmd_wq#224 ->(wq_completion)nfc20_nci_cmd_wq#8 ->(wq_completion)nfc20_nci_cmd_wq#9 ->(wq_completion)nfc6_nci_cmd_wq#19 ->(wq_completion)nfc6_nci_cmd_wq#20 ->(wq_completion)nfc4_nci_cmd_wq#223 ->(wq_completion)nfc2_nci_rx_wq#1087 ->(wq_completion)nfc2_nci_cmd_wq#1087 ->(wq_completion)nfc44_nci_cmd_wq ->(wq_completion)nfc3_nci_rx_wq#663 ->(wq_completion)nfc3_nci_cmd_wq#667 ->(wq_completion)nfc13_nci_cmd_wq#13 FD: 3 BD: 2 +.+.: subsys mutex#21 ->&k->k_lock FD: 152 BD: 1 
+.+.: input_mutex ->input_devices_poll_wait.lock ->fs_reclaim ->pool_lock#2 ->&dev->mutex#2 ->input_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->chrdevs_lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#31 FD: 1 BD: 2 ....: input_devices_poll_wait.lock FD: 50 BD: 2 ++++: (netlink_chain).rwsem ->hwsim_radio_lock ->pool_lock#2 ->&obj_hash[i].lock ->reg_indoor_lock ->&rq->__lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->rcu_node_0 ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->&n->list_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&q->instances_lock ->&log->instances_lock ->&nft_net->commit_mutex FD: 15 BD: 1 ++++: proto_tab_lock ->&c->lock ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 3 BD: 1 ....: random_ready_notifier.lock ->crngs.lock FD: 1 BD: 2 ....: misc_minors_ida.xa_lock FD: 151 BD: 2 +.+.: (work_completion)(&rfkill_global_led_trigger_work) ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 150 BD: 14 +.+.: rfkill_global_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->&rfkill->lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#40 ->triggers_list_lock ->leds_list_lock ->&rq->__lock ->rfkill_global_mutex.wait_lock ->&n->list_lock ->&data->mtx ->&____s->seqcount#2 ->&sem->wait_lock ->&p->pi_lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->quarantine_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->uevent_sock_mutex.wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&lock->wait_lock ->&base->lock FD: 1 BD: 1 +.+.: wtd_deferred_reg_mutex FD: 92 BD: 1 +.+.: &type->s_umount_key#12/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#11 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#11 ->&dentry->d_lock FD: 110 BD: 1 +.+.: (work_completion)(&tracerfs_init_work) ->pin_fs_lock ->fs_reclaim ->pool_lock#2 ->sb_lock ->&c->lock ->&____s->seqcount ->&type->s_umount_key#13/1 ->&type->s_umount_key#14 ->mnt_id_ida.xa_lock ->pcpu_alloc_mutex ->&dentry->d_lock ->mount_lock ->&obj_hash[i].lock ->&fsnotify_mark_srcu ->&sb->s_type->i_mutex_key#5 ->event_mutex ->(module_notify_list).rwsem ->trace_types_lock FD: 95 BD: 2 +.+.: &type->s_umount_key#13/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->&sb->s_type->i_lock_key#12 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->&type->s_umount_key#14 FD: 38 BD: 6 +.+.: &sb->s_type->i_lock_key#12 ->&dentry->d_lock FD: 82 BD: 3 +.+.: &type->s_umount_key#14 ->sb_lock ->list_lrus_mutex ->&xa->xa_lock#5 ->&obj_hash[i].lock ->pool_lock#2 ->shrinker_mutex ->&rsp->gp_wait ->pcpu_lock ->fs_reclaim ->&dentry->d_lock ->&lru->node[i].lock FD: 10 BD: 304 ....: &xa->xa_lock#5 ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock 
->&n->list_lock FD: 79 BD: 4 +.+.: &sb->s_type->i_mutex_key#5 ->&sb->s_type->i_lock_key#12 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount FD: 4 BD: 10 ..-.: &rsp->gp_wait ->&obj_hash[i].lock ->pool_lock#2 FD: 92 BD: 1 +.+.: &type->s_umount_key#15/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#13 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 68 BD: 130 .+.+: &fsnotify_mark_srcu ->&conn->lock ->fs_reclaim ->pool_lock#2 ->&group->notification_lock ->&group->notification_waitq ->&rq->__lock ->&c->lock ->&n->list_lock ->remove_cache_srcu ->&obj_hash[i].lock ->&____s->seqcount#2 ->&____s->seqcount ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#13 ->&dentry->d_lock FD: 85 BD: 2 +.+.: event_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#5 ->trace_event_sem ->trace_types_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#16/1 ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#14 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#14 ->&dentry->d_lock FD: 1 BD: 5 +.+.: eventfs_mutex FD: 92 BD: 1 +.+.: &type->s_umount_key#17/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#15 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#15 ->&dentry->d_lock FD: 67 BD: 1 +.+.: kclist_lock ->resource_lock ->fs_reclaim ->pool_lock#2 FD: 92 BD: 1 +.+.: &type->s_umount_key#18/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&c->lock ->&____s->seqcount ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#16 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#16 ->&dentry->d_lock FD: 257 BD: 2 +.+.: timer_update_work ->timer_keys_mutex FD: 256 BD: 3 +.+.: timer_keys_mutex ->cpu_hotplug_lock FD: 216 BD: 33 .+.+: tomoyo_ss ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tomoyo_policy_lock ->(console_sem).lock ->&obj_hash[i].lock ->&dentry->d_lock ->tomoyo_log_lock ->tomoyo_log_wait.lock ->&c->lock ->&____s->seqcount ->quarantine_lock ->file_systems_lock ->fs_reclaim ->&mm->mmap_lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->remove_cache_srcu ->&n->list_lock ->rcu_node_0 ->rename_lock ->&base->lock ->mount_lock ->&rcu_state.expedited_wq ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&fs->lock ->pool_lock FD: 66 BD: 1 +.+.: pnp_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#22 FD: 3 BD: 1 +.+.: subsys mutex#23 ->&k->k_lock FD: 3 BD: 1 +.+.: subsys mutex#24 ->&k->k_lock FD: 4 BD: 1 +.+.: subsys mutex#25 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 ....: netevent_notif_chain.lock FD: 67 BD: 1 +.+.: clients_rwsem ->fs_reclaim ->clients.xa_lock FD: 2 BD: 2 +.+.: clients.xa_lock ->pool_lock#2 FD: 30 BD: 
8 .+.+: devices_rwsem ->rcu_node_0 ->&rq->__lock ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: (blocking_lsm_notifier_chain).rwsem FD: 105 BD: 52 ++++: (inetaddr_chain).rwsem ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->fib_info_lock ->&dir->lock#2 ->&____s->seqcount ->&c->lock ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&net->sctp.local_addr_lock ->rlock-AF_NETLINK ->&rq->__lock ->&n->list_lock ->rcu_node_0 ->remove_cache_srcu ->&ipvlan->addrs_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->&tbl->lock ->class ->(&tbl->proxy_timer) ->&base->lock ->krc.lock ->mmu_notifier_invalidate_range_start ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock ->quarantine_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->(inetaddr_chain).rwsem.wait_lock ->&x->wait#2 ->&meta->lock FD: 1 BD: 9 ....: inet6addr_chain.lock FD: 1 BD: 1 +.+.: buses_mutex FD: 1 BD: 1 +.+.: offload_lock FD: 1 BD: 1 +...: inetsw_lock FD: 76 BD: 2 +.+.: pcpu_balance_work ->pcpu_alloc_mutex ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 54 +.+.: ptype_lock FD: 738 BD: 1 +.+.: (wq_completion)events_power_efficient ->(work_completion)(&(&tbl->managed_work)->work) ->(check_lifetime_work).work ->(work_completion)(&(&cache_cleaner)->work) ->(work_completion)(&(&ops->cursor_work)->work) ->(work_completion)(&(&hub->init_work)->work) ->(work_completion)(&(&gc_work->dwork)->work) ->(work_completion)(&(&tbl->gc_work)->work) ->(gc_work).work ->(work_completion)(&(&fw_cache.work)->work) ->&rq->__lock ->(crda_timeout).work ->(reg_check_chans).work FD: 45 BD: 2 +.+.: (work_completion)(&(&tbl->managed_work)->work) ->&tbl->lock FD: 44 BD: 125 +.-.: &tbl->lock ->&obj_hash[i].lock ->&base->lock ->&n->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&c->lock ->pool_lock#2 ->nl_table_lock ->rlock-AF_NETLINK ->nl_table_wait.lock ->&dir->lock#2 ->krc.lock ->batched_entropy_u32.lock ->&zone->lock ->&____s->seqcount ->tk_core.seq.seqcount ->&n->list_lock ->&____s->seqcount#2 ->&meta->lock ->quarantine_lock ->init_task.mems_allowed_seq.seqcount FD: 30 BD: 2 +.+.: (check_lifetime_work).work ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 52 +.+.: &net->rules_mod_lock FD: 1 BD: 1 +.+.: tcp_ulp_list_lock FD: 1 BD: 1 +...: xfrm_state_afinfo_lock FD: 1 BD: 1 +.+.: xfrm_policy_afinfo_lock FD: 1 BD: 1 +...: xfrm_input_afinfo_lock FD: 16 BD: 553 ..-.: krc.lock ->&obj_hash[i].lock ->hrtimer_bases.lock ->&base->lock FD: 1 BD: 53 +...: k-slock-AF_INET/1 FD: 70 BD: 1 +.+.: (wq_completion)events_highpri ->(work_completion)(&(&krcp->page_cache_work)->work) ->(work_completion)(flush) FD: 67 BD: 2 +.+.: (work_completion)(&(&krcp->page_cache_work)->work) ->fs_reclaim ->&____s->seqcount ->krc.lock FD: 1 BD: 2 +.+.: &hashinfo->lock FD: 1 BD: 1 +.+.: tcp_cong_list_lock FD: 1 BD: 1 +.+.: mptcp_sched_list_lock FD: 2 BD: 7 +.+.: cache_list_lock ->&cd->hash_lock FD: 28 BD: 2 +.+.: (work_completion)(&(&cache_cleaner)->work) ->cache_list_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 1 BD: 1 +.+.: (rpc_pipefs_notifier_list).rwsem FD: 1 BD: 1 +.+.: svc_xprt_class_lock FD: 40 BD: 1 +.+.: xprt_list_lock ->(console_sem).lock FD: 158 BD: 3 ++++: umhelper_sem ->usermodehelper_disabled_waitq.lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx 
->&k->k_lock ->subsys mutex#73 ->fw_lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&x->wait#22 ->&base->lock ->&rq->__lock ->(&timer.timer) ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount#2 FD: 1 BD: 4 ....: usermodehelper_disabled_waitq.lock FD: 28 BD: 404 +.+.: &dentry->d_lock/1 ->&lru->node[i].lock FD: 111 BD: 3 .+.+: sb_writers#2 ->mount_lock ->&sb->s_type->i_mutex_key/1 ->&sb->s_type->i_mutex_key FD: 107 BD: 4 +.+.: &sb->s_type->i_mutex_key/1 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&obj_hash[i].lock ->tomoyo_ss ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#2 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key ->&c->lock ->&____s->seqcount FD: 1 BD: 2 +.+.: tomoyo_log_lock FD: 1 BD: 2 ....: tomoyo_log_wait.lock FD: 60 BD: 148 +.+.: &wb->list_lock ->&sb->s_type->i_lock_key#2 ->&sb->s_type->i_lock_key#23 ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_lock_key ->&sb->s_type->i_lock_key#5 ->&sb->s_type->i_lock_key#24 ->&sb->s_type->i_lock_key#3 ->&sb->s_type->i_lock_key#7 FD: 175 BD: 2 +.+.: (work_completion)(&sub_info->work) ->&sighand->siglock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->batched_entropy_u64.lock ->&obj_hash[i].lock ->init_files.file_lock ->init_fs.lock ->&p->alloc_lock ->lock ->pidmap_lock ->cgroup_threadgroup_rwsem ->&c->lock ->input_pool.lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&n->list_lock ->rcu_node_0 ->&sig->wait_chldexit ->tasklist_lock ->&prev->lock ->&(&sig->stats_lock)->lock ->css_set_lock ->&x->wait#16 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&rcu_state.expedited_wq ->quarantine_lock FD: 1 BD: 3 +.+.: &drv->dynids.lock FD: 1 BD: 1 +.+.: umh_sysctl_lock FD: 211 BD: 2 +.+.: &tsk->futex_exit_mutex ->&p->pi_lock ->&rq->__lock ->&mm->mmap_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 29 BD: 1 +.+.: &child->perf_event_mutex ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 106 ....: &pid->wait_pidfd FD: 27 BD: 156 ....: &sig->wait_chldexit ->&p->pi_lock FD: 29 BD: 156 ....: &(&sig->stats_lock)->lock ->&____s->seqcount#4 FD: 28 BD: 157 ....: &____s->seqcount#4 ->pidmap_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#26 FD: 177 BD: 1 +.+.: subsys mutex#27 ->&k->list_lock ->&k->k_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->platform_devid_ida.xa_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->&(&priv->bus_notifier)->rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#4 ->wakeup_ida.xa_lock ->gdp_mutex ->subsys mutex#13 ->events_lock ->rtcdev_lock FD: 1 BD: 1 +.+.: subsys mutex#28 FD: 1 BD: 102 +.+.: subsys mutex#29 FD: 1 BD: 4 +.+.: key_user_lock FD: 1 BD: 4 +.+.: key_serial_lock FD: 5 BD: 5 +.+.: key_construction_mutex ->&obj_hash[i].lock ->pool_lock#2 ->keyring_name_lock FD: 73 BD: 3 +.+.: &type->lock_class ->keyring_serialise_link_lock ->fs_reclaim ->pool_lock#2 ->key_user_lock ->crngs.lock ->key_serial_lock ->key_construction_mutex ->ima_keys_lock ->&c->lock ->&____s->seqcount FD: 69 BD: 4 +.+.: keyring_serialise_link_lock 
->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->root_key_user.lock ->key_construction_mutex FD: 1 BD: 1 ....: &pgdat->kswapd_wait FD: 1 BD: 1 +.+.: drivers_lock FD: 116 BD: 1 +.+.: damon_dbgfs_lock ->fs_reclaim ->pool_lock#2 ->damon_ops_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 FD: 92 BD: 1 +.+.: &type->s_umount_key#19/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#17 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#17 ->&dentry->d_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#20/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 8 +.+.: &sb->s_type->i_lock_key#18 ->&dentry->d_lock FD: 1 BD: 1 +.+.: configfs_subsystem_mutex FD: 87 BD: 1 +.+.: &sb->s_type->i_mutex_key#6/1 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]/2 ->&sb->s_type->i_mutex_key#7/2 ->&default_group_class[depth - 1]#3 ->&rq->__lock FD: 1 BD: 9 +.+.: configfs_dirent_lock FD: 84 BD: 2 +.+.: &default_group_class[depth - 1]/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#4/2 ->&c->lock ->&____s->seqcount FD: 66 BD: 1 +.+.: ecryptfs_daemon_hash_mux ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 ....: &ecryptfs_kthread_ctl.wait FD: 2 BD: 1 +.+.: ecryptfs_msg_ctx_lists_mux ->&ecryptfs_msg_ctx_arr[i].mux FD: 1 BD: 2 +.+.: &ecryptfs_msg_ctx_arr[i].mux FD: 41 BD: 1 +...: put_task_map-wait-type-override ->&obj_hash[i].lock ->pool_lock#2 ->pool_lock ->quarantine_lock ->&base->lock ->&meta->lock ->kfence_freelist_lock ->task_group_lock ->stock_lock ->css_set_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->percpu_counters_lock ->pcpu_lock FD: 1 BD: 1 +.+.: nfs_version_lock FD: 93 BD: 1 ++++: key_types_sem ->(console_sem).lock ->asymmetric_key_parsers_sem ->&type->lock_class ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: pnfs_spinlock FD: 26 BD: 5 +.+.: &sn->pipefs_sb_lock ->&rq->__lock FD: 1 BD: 1 +.+.: nls_lock FD: 37 BD: 2 +.+.: (work_completion)(&p->wq) ->vmap_area_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->pool_lock#2 ->quarantine_lock ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&base->lock ->&meta->lock ->kfence_freelist_lock ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: jffs2_compressor_list_lock FD: 31 BD: 1 ..-.: &(&cache_cleaner)->timer FD: 1 BD: 1 +.+.: next_tag_value_lock FD: 14 BD: 1 +.-.: (&tcp_orphan_timer) ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 1 ....: log_redrive_lock FD: 2 BD: 1 ....: &TxAnchor.LazyLock ->jfs_commit_thread_wait.lock FD: 1 BD: 2 ....: jfs_commit_thread_wait.lock FD: 1 BD: 1 +.+.: jfsTxnLock FD: 40 BD: 1 +.+.: ocfs2_stack_lock ->(console_sem).lock FD: 1 BD: 1 +.+.: o2hb_callback_sem FD: 1 BD: 1 +.+.: o2net_handler_lock FD: 3 BD: 1 +.+.: subsys mutex#30 ->&k->k_lock FD: 93 BD: 1 +.+.: &type->s_umount_key#21/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock 
->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#19 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->&n->list_lock ->&rq->__lock ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock FD: 38 BD: 5 +.+.: &sb->s_type->i_lock_key#19 ->&dentry->d_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#22/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->&c->lock ->&____s->seqcount ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#20 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#20 ->&dentry->d_lock FD: 1 BD: 1 +.+.: cipso_v4_doi_list_lock FD: 252 BD: 58 +.+.: nf_hook_mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&c->lock ->stock_lock ->&____s->seqcount#2 ->cpu_hotplug_lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 66 BD: 1 +.+.: alg_types_sem ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: dma_list_mutex FD: 84 BD: 2 ++++: asymmetric_key_parsers_sem ->(console_sem).lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->crypto_alg_sem ->&obj_hash[i].lock ->(crypto_chain).rwsem ->&x->wait#20 ->&base->lock ->&rq->__lock ->(&timer.timer) FD: 720 BD: 1 +.+.: blkcg_pol_register_mutex ->blkcg_pol_mutex ->cgroup_mutex FD: 1 BD: 2 +.+.: elv_list_lock FD: 69 BD: 1 +.+.: crc_t10dif_mutex ->crypto_alg_sem ->fs_reclaim ->pool_lock#2 FD: 69 BD: 1 +.+.: crc64_rocksoft_mutex ->crypto_alg_sem ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: ts_mod_lock FD: 1 BD: 1 +.+.: pci_ep_cfs_subsys.su_mutex FD: 80 BD: 1 +.+.: &default_group_class[depth - 1]#2/1 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#7/2 FD: 1 BD: 3 +.+.: &sb->s_type->i_mutex_key#7/2 FD: 1 BD: 1 +.+.: pci_epf_mutex FD: 257 BD: 1 +.+.: ipmi_interfaces_mutex ->&k->list_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&p->pi_lock ->&rq->__lock ->&x->wait ->wq_pool_mutex ->&base->lock ->panic_notifier_list.lock FD: 2 BD: 1 +.+.: smi_watchers_mutex ->&ipmi_interfaces_srcu FD: 1 BD: 3 .+.?: &ipmi_interfaces_srcu FD: 1 BD: 1 +.+.: smi_infos_lock FD: 4 BD: 116 ....: mask_lock ->tmp_mask_lock FD: 3 BD: 117 ....: tmp_mask_lock ->tmpmask_lock ->&its->lock FD: 27 BD: 1 ....: &desc->wait_for_threads ->&p->pi_lock FD: 3 BD: 2 +.+.: subsys mutex#31 ->&k->k_lock FD: 28 BD: 3 +.+.: &dev->mutex#2 ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock FD: 138 BD: 1 +.+.: register_count_mutex ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock FD: 1 BD: 1 ....: thermal_cdev_ida.xa_lock FD: 1 BD: 1 ....: cpufreq_driver_lock FD: 3 BD: 1 +.+.: subsys mutex#32 ->&k->k_lock FD: 66 BD: 1 +.+.: scmi_requested_devices_mtx ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 ....: virtio_index_ida.xa_lock FD: 1 BD: 1 +.+.: subsys mutex#33 FD: 1 BD: 1 +.+.: vdpa_dev_lock FD: 101 BD: 3 ++++: &type->i_mutex_dir_key#2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&c->lock ->&____s->seqcount ->namespace_sem ->&n->list_lock ->&sem->wait_lock ->&rq->__lock ->remove_cache_srcu ->&xa->xa_lock#5 
->&obj_hash[i].lock ->stock_lock ->tomoyo_ss ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&simple_offset_xa_lock ->smack_known_lock ->rcu_node_0 FD: 3 BD: 1 +.+.: subsys mutex#34 ->&k->k_lock FD: 31 BD: 1 ..-.: lib/debugobjects.c:101 FD: 28 BD: 2 +.+.: (debug_obj_work).work ->pool_lock#2 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 1 BD: 6 +.+.: (work_completion)(&buf->work) FD: 1 BD: 1 ....: rng_index_ida.xa_lock FD: 152 BD: 4 +.+.: &md->mutex ->pci_lock ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#6 ->&rq->__lock ->&its->dev_alloc_lock ->&domain->mutex ->&irq_desc_lock_class ->tmpmask_lock ->&its->lock ->&root->kernfs_rwsem ->lock ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->sparse_irq_lock ->vmap_area_lock ->purge_vmap_area_lock FD: 8 BD: 5 +.+.: &xa->xa_lock#6 ->&c->lock ->&____s->seqcount ->pool_lock#2 FD: 68 BD: 8 +.+.: &its->dev_alloc_lock ->&its->lock ->fs_reclaim ->&zone->lock ->&____s->seqcount ->pool_lock#2 ->&c->lock ->lpi_range_lock ->&obj_hash[i].lock FD: 1 BD: 118 ....: tmpmask_lock FD: 1 BD: 1 +.+.: &dev->vqs_list_lock FD: 1 BD: 1 ....: &vp_dev->lock FD: 70 BD: 1 +.+.: rng_mutex ->&x->wait#13 ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->rng_mutex.wait_lock FD: 1 BD: 2 ....: &x->wait#13 FD: 27 BD: 2 -.-.: &x->wait#14 ->&p->pi_lock FD: 1 BD: 2 +.+.: rng_mutex.wait_lock FD: 28 BD: 1 +.+.: reading_mutex ->&x->wait#14 ->&rq->__lock FD: 1 BD: 1 ....: &dev->config_lock FD: 2 BD: 1 +.-.: drivers/char/random.c:1010 ->input_pool.lock FD: 1 BD: 1 ....: &dev->managed.lock FD: 92 BD: 1 +.+.: &type->s_umount_key#23/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#21 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#21 ->&dentry->d_lock FD: 2 BD: 198 ....: drm_minor_lock ->pool_lock#2 FD: 3 BD: 3 +.+.: subsys mutex#35 ->&k->k_lock FD: 66 BD: 16 +.+.: &dev->mode_config.idr_mutex ->fs_reclaim ->pool_lock#2 ->&rq->__lock FD: 111 BD: 12 +.+.: crtc_ww_class_acquire ->crtc_ww_class_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->&n->list_lock ->rcu_node_0 ->&rq->__lock FD: 110 BD: 13 +.+.: crtc_ww_class_mutex ->reservation_ww_class_acquire ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&dev->mode_config.idr_mutex ->&dev->mode_config.blob_lock ->&crtc->commit_lock ->reservation_ww_class_mutex ->tk_core.seq.seqcount ->&vkms_out->lock ->&dev->vbl_lock ->&x->wait#15 ->(work_completion)(&vkms_state->composer_work) ->&base->lock ->&rq->__lock ->(&timer.timer) ->(work_completion)(&vkms_state->composer_work)#2 ->&n->list_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock FD: 1 BD: 14 +.+.: &dev->mode_config.blob_lock FD: 1 BD: 1 ....: &xa->xa_lock#7 FD: 1 BD: 1 ....: &xa->xa_lock#8 FD: 1 BD: 15 ....: &dev->mode_config.connector_list_lock FD: 18 BD: 17 ..-.: &dev->vbl_lock ->&dev->vblank_time_lock FD: 162 BD: 1 .+.+: drm_connector_list_iter ->&dev->mode_config.connector_list_lock ->fs_reclaim ->pool_lock#2 ->&connector->mutex ->&c->lock ->&n->list_lock FD: 160 BD: 2 +.+.: &connector->mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock 
->&____s->seqcount ->&c->lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->&base->lock ->&k->k_lock ->subsys mutex#35 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&dev->mode_config.idr_mutex ->connector_list_lock FD: 1 BD: 3 +.+.: connector_list_lock FD: 1 BD: 1 +.+.: &dev->filelist_mutex FD: 197 BD: 1 +.+.: &dev->clientlist_mutex ->&helper->lock ->registration_lock ->(console_sem).lock ->kernel_fb_helper_lock FD: 150 BD: 8 +.+.: &helper->lock ->fs_reclaim ->pool_lock#2 ->&client->modeset_mutex ->&obj_hash[i].lock ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&mgr->vm_lock ->&dev->object_name_lock ->&node->vm_lock ->&file_private->table_lock ->&dev->mode_config.idr_mutex ->&dev->mode_config.fb_lock ->&file->fbs_lock ->&prime_fpriv->lock ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&dev->master_mutex ->&lock->wait_lock ->&rq->__lock ->reservation_ww_class_mutex ->&p->pi_lock FD: 113 BD: 10 +.+.: &client->modeset_mutex ->&dev->mode_config.mutex ->fs_reclaim ->pool_lock#2 ->crtc_ww_class_acquire FD: 112 BD: 11 +.+.: &dev->mode_config.mutex ->crtc_ww_class_acquire ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock FD: 2 BD: 9 +.+.: &mgr->vm_lock ->pool_lock#2 FD: 61 BD: 9 +.+.: &dev->object_name_lock ->lock FD: 4 BD: 198 +.+.: &file_private->table_lock ->pool_lock#2 ->&obj_hash[i].lock FD: 4 BD: 9 +.+.: &node->vm_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 9 +.+.: &dev->mode_config.fb_lock FD: 1 BD: 9 +.+.: &file->fbs_lock FD: 1 BD: 9 +.+.: &prime_fpriv->lock FD: 195 BD: 2 +.+.: registration_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#11 ->vt_switch_mutex ->(console_sem).lock ->console_lock FD: 66 BD: 3 +.+.: vt_switch_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 6 +.+.: &fb_info->lock FD: 1 BD: 6 ....: vt_event_lock FD: 116 BD: 9 +.+.: &dev->master_mutex ->&client->modeset_mutex ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&file->master_lookup_lock ->&obj_hash[i].lock ->&n->list_lock ->&rq->__lock ->&dev->mode_config.idr_mutex ->&lock->wait_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 14 +.+.: &crtc->commit_lock FD: 39 BD: 296 -.-.: &xa->xa_lock#9 ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->key#11 ->&n->list_lock ->&s->s_inode_wblist_lock ->&base->lock ->key#12 ->&wb->work_lock ->stock_lock ->key#14 ->&xa->xa_lock#5 ->&pl->lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 1 BD: 118 +.+.: &info->lock FD: 6 BD: 565 +.+.: lock#4 ->&lruvec->lru_lock ->&obj_hash[i].lock ->lock#11 FD: 2 BD: 567 ....: &lruvec->lru_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 34 BD: 14 -.-.: &vkms_out->lock ->&dev->event_lock FD: 33 BD: 15 -.-.: &dev->event_lock ->&dev->vbl_lock ->&____s->seqcount#5 ->&x->wait#15 ->&obj_hash[i].lock ->pool_lock#2 ->&dev->vblank_time_lock ->&vblank->queue ->&base->lock FD: 1 BD: 20 ----: &____s->seqcount#5 FD: 27 BD: 16 -.-.: &x->wait#15 ->&p->pi_lock FD: 17 BD: 18 -.-.: &dev->vblank_time_lock ->tk_core.seq.seqcount ->&(&vblank->seqlock)->lock 
->&obj_hash[i].lock ->hrtimer_bases.lock FD: 2 BD: 19 -.-.: &(&vblank->seqlock)->lock ->&____s->seqcount#5 FD: 1 BD: 14 +.+.: (work_completion)(&vkms_state->composer_work) FD: 1 BD: 10 ....: &helper->damage_lock FD: 152 BD: 2 +.+.: (work_completion)(&helper->damage_work) ->&helper->damage_lock ->&helper->lock FD: 1 BD: 200 +.+.: &lock->wait_lock FD: 1 BD: 16 -.-.: &vblank->queue FD: 1 BD: 14 +.+.: (work_completion)(&vkms_state->composer_work)#2 FD: 1 BD: 2 +.+.: kernel_fb_helper_lock FD: 1 BD: 1 +.+.: drivers_lock#2 FD: 1 BD: 1 +.+.: devices_lock FD: 1 BD: 8 ....: blk_queue_ida.xa_lock FD: 2 BD: 12 +.+.: &xa->xa_lock#10 ->pool_lock#2 FD: 16 BD: 248 ....: &q->queue_lock ->&blkcg->lock ->pool_lock#2 ->pcpu_lock ->&obj_hash[i].lock ->percpu_counters_lock ->&c->lock ->&____s->seqcount FD: 2 BD: 249 ....: &blkcg->lock ->pool_lock#2 FD: 1 BD: 20 +.+.: &bdev->bd_size_lock FD: 3 BD: 12 +.+.: subsys mutex#36 ->&k->k_lock FD: 278 BD: 8 +.+.: &q->sysfs_dir_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->&q->sysfs_lock ->&obj_hash[i].lock FD: 277 BD: 9 +.+.: &q->sysfs_lock ->&q->debugfs_mutex ->&q->unused_hctx_lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&obj_hash[i].lock ->cpu_hotplug_lock ->fs_reclaim ->&xa->xa_lock#11 ->pcpu_alloc_mutex ->&q->rq_qos_mutex ->&stats->lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->lock ->&root->kernfs_rwsem FD: 114 BD: 11 +.+.: &q->debugfs_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 FD: 28 BD: 36 ....: percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 3 BD: 8 +.+.: subsys mutex#37 ->&k->k_lock FD: 1 BD: 8 ....: cgwb_lock FD: 1 BD: 8 +...: bdi_lock FD: 58 BD: 254 +.+.: inode_hash_lock ->&sb->s_type->i_lock_key#3 ->&sb->s_type->i_lock_key#22 ->&s->s_inode_list_lock ->&sb->s_type->i_lock_key#24 ->&sb->s_type->i_lock_key#30 ->&sb->s_type->i_lock_key#31 FD: 27 BD: 10 +.+.: bdev_lock ->&bdev->bd_holder_lock ->&rq->__lock FD: 402 BD: 9 +.+.: &disk->open_mutex ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&xa->xa_lock#9 ->lock#4 ->mmu_notifier_invalidate_range_start ->&c->lock ->&mapping->i_private_lock ->tk_core.seq.seqcount ->&ret->b_uptodate_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->&lruvec->lru_lock ->&folio_wait_table[i] ->&rq->__lock ->&cfs_rq->removed.lock ->(console_sem).lock ->&s->s_inode_list_lock ->pcpu_alloc_mutex ->&bdev->bd_size_lock ->&x->wait#9 ->ext_devt_ida.xa_lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&k->k_lock ->subsys mutex#36 ->&xa->xa_lock#10 ->inode_hash_lock ->bdev_lock ->&lo->lo_mutex ->nbd_index_mutex ->&nbd->config_lock ->&new->lock ->&lock->wait_lock ->rcu_node_0 ->&lo->lo_lock ->kernfs_idr_lock ->uevent_sock_mutex ->&bdev->bd_holder_lock ->&n->list_lock ->&sem->wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->nbd_index_mutex.wait_lock ->remove_cache_srcu FD: 1 BD: 118 +.+.: &mapping->i_private_lock FD: 28 BD: 12 -.-.: &ret->b_uptodate_lock ->bit_wait_table + i FD: 1 BD: 561 +.+.: lock#5 FD: 66 BD: 1 +.+.: loop_ctl_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 10 +.+.: &q->unused_hctx_lock FD: 2 BD: 10 +.+.: &xa->xa_lock#11 ->pool_lock#2 FD: 1 BD: 7 +.+.: &set->tag_list_lock FD: 29 BD: 23 +.+.: &q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->mq_freeze_wq ->&rq->__lock FD: 
27 BD: 37 ..-.: &q->mq_freeze_wq ->&p->pi_lock FD: 119 BD: 10 +.+.: &q->rq_qos_mutex ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->debugfs_mutex ->set->srcu FD: 1 BD: 10 ....: &stats->lock FD: 277 BD: 14 +.+.: nbd_index_mutex ->fs_reclaim ->pool_lock#2 ->&nbd->config_lock ->rlock-AF_NETLINK ->&c->lock ->&n->list_lock ->&rq->__lock ->nbd_index_mutex.wait_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 26 BD: 21 .+.+: set->srcu ->&rq->__lock FD: 34 BD: 3 +.+.: (work_completion)(&(&q->requeue_work)->work) ->&q->requeue_lock ->&hctx->lock ->&__ctx->lock FD: 30 BD: 3 +.+.: (work_completion)(&(&hctx->run_work)->work) FD: 31 BD: 1 ..-.: &(&ops->cursor_work)->timer FD: 33 BD: 2 +.+.: (work_completion)(&(&ops->cursor_work)->work) ->(console_sem).lock ->&obj_hash[i].lock ->&base->lock FD: 295 BD: 1 +.+.: zram_index_mutex ->fs_reclaim ->pool_lock#2 ->blk_queue_ida.xa_lock ->&obj_hash[i].lock ->pcpu_alloc_mutex ->bio_slab_lock ->percpu_counters_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->lock ->&q->queue_lock ->&x->wait#9 ->&bdev->bd_size_lock ->&k->list_lock ->gdp_mutex ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->percpu_ref_switch_lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#37 ->cgwb_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->bdi_lock ->inode_hash_lock ->(console_sem).lock FD: 3 BD: 1 +.+.: subsys mutex#38 ->&k->k_lock FD: 67 BD: 2 +.+.: &default_group_class[depth - 1]#3 ->fs_reclaim ->pool_lock#2 ->configfs_dirent_lock FD: 2 BD: 1 +.+.: &lock ->nullb_indexes.xa_lock FD: 1 BD: 2 ....: nullb_indexes.xa_lock FD: 8 BD: 2 ....: nfc_index_ida.xa_lock ->&c->lock ->&n->list_lock ->pool_lock#2 FD: 168 BD: 4 +.+.: nfc_devlist_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->subsys mutex#39 ->&____s->seqcount ->&k->k_lock ->&genl_data->genl_data_mutex ->&c->lock ->&n->list_lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->&x->wait#9 ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->nfc_devlist_mutex.wait_lock ->&cfs_rq->removed.lock ->remove_cache_srcu ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->uevent_sock_mutex.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->dpm_list_mtx.wait_lock ->kn->active#4 ->dev_pm_qos_sysfs_mtx.wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 28 BD: 5 +.+.: subsys mutex#39 ->&k->k_lock ->&rq->__lock FD: 4 BD: 4 +.+.: llcp_devices_lock ->&k->list_lock ->&k->k_lock FD: 1 BD: 58 ....: &rfkill->lock FD: 29 BD: 15 +.+.: subsys mutex#40 ->&k->k_lock ->&rq->__lock ->&lock->wait_lock FD: 151 BD: 6 +.+.: (work_completion)(&rfkill->sync_work) ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 15 +.+.: rfkill_global_mutex.wait_lock FD: 2 BD: 1 +.+.: dma_heap_minors.xa_lock ->pool_lock#2 FD: 3 BD: 1 +.+.: subsys mutex#41 ->&k->k_lock FD: 1 BD: 1 +.+.: heap_list_lock FD: 4 BD: 1 +.+.: subsys mutex#42 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 +.+.: 
nvmf_hosts_mutex FD: 3 BD: 1 +.+.: subsys mutex#43 ->&k->k_lock FD: 1 BD: 1 +.+.: nvmf_transports_rwsem FD: 3 BD: 1 +.+.: subsys mutex#44 ->&k->k_lock FD: 83 BD: 3 +.+.: &default_group_class[depth - 1]#4/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#5/2 FD: 1 BD: 1 +.+.: nvmet_config_sem FD: 3 BD: 1 +.+.: subsys mutex#45 ->&k->k_lock FD: 1 BD: 3 ....: nvme_instance_ida.xa_lock FD: 3 BD: 3 +.+.: subsys mutex#46 ->&k->k_lock FD: 127 BD: 3 +.+.: pools_reg_lock ->pools_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem FD: 1 BD: 4 +.+.: pools_lock FD: 82 BD: 4 +.+.: &default_group_class[depth - 1]#5/2 ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#6/2 FD: 81 BD: 5 +.+.: &default_group_class[depth - 1]#6/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#7 ->&default_group_class[depth - 1]#7/2 FD: 67 BD: 6 +.+.: &default_group_class[depth - 1]#7 ->fs_reclaim ->&____s->seqcount ->pool_lock#2 ->configfs_dirent_lock FD: 1 BD: 6 +.+.: &default_group_class[depth - 1]#7/2 FD: 1 BD: 1 +.+.: backend_mutex FD: 1 BD: 1 +.+.: scsi_mib_index_lock FD: 1 BD: 1 +.+.: hba_lock FD: 66 BD: 1 +.+.: device_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: &hba->device_lock FD: 1 BD: 1 +.+.: part_parser_lock FD: 310 BD: 1 +.+.: mtd_table_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&rq->__lock ->&x->wait#11 ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#47 ->devtree_lock ->nvmem_ida.xa_lock ->nvmem_cell_mutex ->&k->k_lock ->subsys mutex#48 ->nvmem_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->(console_sem).lock ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->mmu_notifier_invalidate_range_start ->blk_queue_ida.xa_lock ->&q->sysfs_lock ->&set->tag_list_lock ->bio_slab_lock ->percpu_counters_lock ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->&q->mq_freeze_lock ->set->srcu ->percpu_ref_switch_lock ->&q->queue_lock ->&bdev->bd_size_lock ->elv_list_lock ->(work_completion)(&(&q->requeue_work)->work) ->(work_completion)(&(&hctx->run_work)->work) ->&q->debugfs_mutex ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->subsys mutex#37 ->cgwb_lock ->bdi_lock ->inode_hash_lock FD: 1 BD: 1 +.+.: chip_drvs_lock FD: 1 BD: 6 ....: (kmod_concurrent_max).lock FD: 27 BD: 8 ....: &x->wait#16 ->&p->pi_lock FD: 1 BD: 3 ....: &prev->lock FD: 3 BD: 2 +.+.: subsys mutex#47 ->&k->k_lock FD: 2 BD: 3 ....: &ctrl->lock ->&ctrl->state_wq FD: 1 BD: 4 ....: &ctrl->state_wq FD: 1 BD: 2 ....: nvmem_ida.xa_lock FD: 1 BD: 2 +.+.: nvmem_cell_mutex FD: 1 BD: 9 +.+.: &hctx->lock FD: 1 BD: 122 +.+.: &nvmeq->sq_lock FD: 27 BD: 6 ..-.: &x->wait#17 ->&p->pi_lock FD: 1 BD: 2 +.+.: subsys mutex#48 FD: 1 BD: 2 +.+.: nvmem_mutex FD: 147 BD: 6 +.+.: nvme_subsystems_lock ->fs_reclaim ->&____s->seqcount ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock 
->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&rq->__lock ->subsys mutex#49 FD: 3 BD: 7 +.+.: subsys mutex#49 ->&k->k_lock FD: 1 BD: 7 +.+.: &xa->xa_lock#12 FD: 155 BD: 1 +.+.: &dev->shutdown_lock ->&md->mutex ->&desc->request_mutex ->&obj_hash[i].lock ->pool_lock#2 ->pci_lock ->fs_reclaim ->free_vmap_area_lock ->vmap_area_lock ->&c->lock ->&____s->seqcount ->register_lock ->&irq_desc_lock_class ->proc_subdir_lock ->proc_inum_ida.xa_lock FD: 1 BD: 116 ....: irq_resend_lock FD: 1 BD: 115 +.+.: &ent->pde_unload_lock FD: 8 BD: 58 +.+.: (work_completion)(work) ->lock#4 ->lock#5 FD: 1 BD: 6 ++++: &ctrl->namespaces_rwsem FD: 435 BD: 4 +.+.: (work_completion)(&ctrl->scan_work) ->&ctrl->scan_lock FD: 437 BD: 3 +.+.: (wq_completion)nvme-wq ->(work_completion)(&ctrl->async_event_work) ->(work_completion)(&ctrl->scan_work) ->(work_completion)(&barr->work) FD: 2 BD: 4 +.+.: (work_completion)(&ctrl->async_event_work) ->&nvmeq->sq_lock FD: 434 BD: 5 +.+.: &ctrl->scan_lock ->fs_reclaim ->pool_lock#2 ->&hctx->lock ->&x->wait#17 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&c->lock ->&____s->seqcount ->&ctrl->namespaces_rwsem ->blk_queue_ida.xa_lock ->pcpu_alloc_mutex ->&q->sysfs_lock ->&set->tag_list_lock ->bio_slab_lock ->percpu_counters_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->lock ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->queue_lock ->&x->wait#9 ->nvme_subsystems_lock ->&subsys->lock ->&bdev->bd_size_lock ->ext_devt_ida.xa_lock ->&k->list_lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->&x->wait#11 ->&k->k_lock ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->gdp_mutex ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#37 ->cgwb_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->bdi_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->nvme_ns_chr_minor_ida.xa_lock ->chrdevs_lock ->subsys mutex#51 ->&dentry->d_lock ->quarantine_lock FD: 1 BD: 52 +.+.: &bond->stats_lock FD: 34 BD: 66 ....: lweventlist_lock ->pool_lock#2 ->&dir->lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&base->lock ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 712 BD: 2 +.+.: (linkwatch_work).work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 65 +.+.: rtnl_mutex.wait_lock FD: 3 BD: 129 ..-.: once_lock ->crngs.lock FD: 256 BD: 2 +.+.: (work_completion)(&w->work) ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 77 BD: 6 +.+.: &subsys->lock ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#12 ->&obj_hash[i].lock ->pcpu_alloc_mutex FD: 27 BD: 52 ++++: (inet6addr_validator_chain).rwsem ->&rq->__lock FD: 26 BD: 52 ++++: (inetaddr_validator_chain).rwsem ->&rq->__lock FD: 1 BD: 10 ....: ext_devt_ida.xa_lock FD: 1 BD: 568 ....: &sem->wait_lock FD: 35 BD: 1 +.+.: (wq_completion)gid-cache-wq ->(work_completion)(&ndev_work->work) ->(work_completion)(&work->work) ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 33 BD: 2 +.+.: (work_completion)(&ndev_work->work) ->devices_rwsem ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rq->__lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->&rcu_state.expedited_wq 
FD: 27 BD: 554 -.-.: &folio_wait_table[i] ->&p->pi_lock FD: 3 BD: 1 +.+.: subsys mutex#50 ->&k->k_lock FD: 1 BD: 1 +.+.: gpio_lookup_lock FD: 1 BD: 1 +.+.: mdio_board_lock FD: 1 BD: 6 ....: nvme_ns_chr_minor_ida.xa_lock FD: 1 BD: 1 +.+.: mode_list_lock FD: 3 BD: 6 +.+.: subsys mutex#51 ->&k->k_lock FD: 1 BD: 1 +.+.: l3mdev_lock FD: 1 BD: 120 -.-.: &retval->lock FD: 45 BD: 1 +.+.: (wq_completion)gve ->(work_completion)(&priv->service_task) FD: 44 BD: 2 +.+.: (work_completion)(&priv->service_task) ->(console_sem).lock ->lweventlist_lock ->&obj_hash[i].lock ->&rq->__lock ->&base->lock FD: 1 BD: 1 +.+.: hnae3_common_lock FD: 3 BD: 1 +.+.: subsys mutex#52 ->&k->k_lock FD: 2 BD: 1 +.+.: compressor_list_lock ->pool_lock#2 FD: 1 BD: 5 ....: hwsim_netgroup_ida.xa_lock FD: 35 BD: 88 +.-.: hwsim_radio_lock ->pool_lock#2 ->&c->lock ->&list->lock#16 ->&____s->seqcount#2 ->&____s->seqcount ->&zone->lock ->&n->list_lock ->init_task.mems_allowed_seq.seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 3 BD: 9 +.+.: subsys mutex#53 ->&k->k_lock FD: 68 BD: 53 +.+.: param_lock ->rate_ctrl_mutex ->disk_events_mutex ->&rq->__lock FD: 398 BD: 55 +.+.: &rdev->wiphy.mtx ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#54 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->nl_table_lock ->nl_table_wait.lock ->reg_requests_lock ->stack_depot_init_mutex ->pcpu_alloc_mutex ->&xa->xa_lock#4 ->net_rwsem ->&x->wait#9 ->subsys mutex#20 ->&dir->lock#2 ->&rq->__lock ->&cfs_rq->removed.lock ->dev_hotplug_mutex ->dev_base_lock ->input_pool.lock ->batched_entropy_u32.lock ->&tbl->lock ->sysctl_lock ->&fq->lock ->&local->iflist_mtx ->rlock-AF_NETLINK ->&rdev->bss_lock ->lweventlist_lock ->&data->mutex ->&rdev->wiphy_work_lock ->&local->filter_lock ->&base->lock ->&tn->lock ->failover_lock ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&idev->mc_lock ->&pnettable->lock ->smc_ib_devices.mutex ->&ndev->lock ->&wdev->event_lock ->&rdev->mgmt_registrations_lock ->(&dwork->timer) ->&dentry->d_lock ->&fsnotify_mark_srcu ->&sb->s_type->i_lock_key#7 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->mount_lock ->(&dwork->timer)#2 ->(work_completion)(&(&link->color_collision_detect_work)->work) ->rtnl_mutex.wait_lock ->&p->pi_lock ->&list->lock#15 ->&ifibss->incomplete_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->tk_core.seq.seqcount ->hrtimer_bases.lock ->&list->lock#2 ->pool_lock ->&n->list_lock ->&lock->wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->lock#6 ->&sta->lock ->&____s->seqcount#2 ->uevent_sock_mutex.wait_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&wdev->pmsr_lock ->&rnp->exp_wq[0] ->krc.lock ->&local->active_txq_lock[i] ->(work_completion)(&sta->drv_deliver_wk) ->&local->queue_stop_reason_lock ->&x->wait#2 ->(&ifibss->timer) ->_xmit_ETHER ->(&local->dynamic_ps_timer) ->(&dwork->timer)#3 ->&rnp->exp_wq[2] ->&list->lock#16 ->&wq->mutex ->cpu_hotplug_lock ->&rnp->exp_wq[1] ->bpf_devs_lock ->&in_dev->mc_tomb_lock ->class ->(&tbl->proxy_timer) ->&ul->lock ->&net->xdp.lock ->mirred_list_lock ->&nft_net->commit_mutex ->&ent->pde_unload_lock ->&net->ipv6.addrconf_hash_lock ->&idev->mc_query_lock ->(work_completion)(&(&idev->mc_report_work)->work) ->&idev->mc_report_lock ->&pnn->pndevs.lock ->&pnn->routes.lock ->target_list_lock 
->kernfs_idr_lock ->dev_pm_qos_sysfs_mtx ->deferred_probe_mutex ->device_links_lock ->&rnp->exp_wq[3] ->(&dwork->timer)#4 ->mmu_notifier_invalidate_range_start ->remove_cache_srcu ->&sem->wait_lock ->&ul->lock#2 ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->(wq_completion)phy340 ->(wq_completion)phy339 ->(wq_completion)phy342 ->(wq_completion)phy341 ->(wq_completion)phy338 ->(wq_completion)phy337 FD: 28 BD: 56 +.+.: subsys mutex#54 ->&k->k_lock ->&rq->__lock FD: 1 BD: 56 +.+.: reg_requests_lock FD: 7 BD: 87 +.-.: &fq->lock ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&zone->lock ->pool_lock#2 FD: 1 BD: 56 +.+.: &local->iflist_mtx FD: 3 BD: 52 +.+.: subsys mutex#55 ->&k->k_lock FD: 2 BD: 53 +.+.: &sdata->sec_mtx ->&sec->lock FD: 1 BD: 58 ++..: &sec->lock FD: 1 BD: 52 +.+.: &local->iflist_mtx#2 FD: 66 BD: 1 +.+.: hwsim_phys_lock ->fs_reclaim ->pool_lock#2 FD: 66 BD: 1 +.+.: xdomain_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: ioctl_mutex FD: 1 BD: 1 +.+.: address_handler_list_lock FD: 1 BD: 1 +.+.: card_mutex FD: 3 BD: 1 +.+.: subsys mutex#56 ->&k->k_lock FD: 27 BD: 1 ....: &x->wait#18 ->&p->pi_lock FD: 29 BD: 2 ..-.: &txlock ->&list->lock#3 ->&txwq FD: 1 BD: 3 ..-.: &list->lock#3 FD: 27 BD: 3 ..-.: &txwq ->&p->pi_lock FD: 2 BD: 1 ....: &iocq[i].lock ->&ktiowq[i] FD: 1 BD: 2 ....: &ktiowq[i] FD: 1 BD: 1 ....: rcu_read_lock_bh FD: 1 BD: 65 +.-.: noop_qdisc.q.lock FD: 3 BD: 5 +.+.: subsys mutex#57 ->&k->k_lock FD: 205 BD: 1 +.+.: usb_bus_idr_lock ->(usb_notifier_list).rwsem ->fs_reclaim ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->hcd_root_hub_lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&dev->power.lock ->device_links_srcu ->&____s->seqcount ->(console_sem).lock ->input_pool.lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->sysfs_symlink_target_lock ->&k->k_lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&(&priv->bus_notifier)->rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#58 ->&x->wait#9 ->&vhci_hcd->vhci->lock ->&lock->wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&base->lock ->(&timer.timer) ->&meta->lock FD: 138 BD: 1 +.+.: table_lock ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&rq->__lock ->(console_sem).lock ->&____s->seqcount ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 2 BD: 1 +.-.: (&ipmi_timer) ->&ipmi_interfaces_srcu FD: 1 BD: 5 +.+.: mon_lock FD: 143 BD: 4 +.+.: usb_port_peer_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&dev->power.lock ->dpm_list_mtx ->&k->k_lock ->dev_pm_qos_mtx ->component_mutex ->device_links_srcu ->dev_pm_qos_sysfs_mtx ->&rq->__lock ->sysfs_symlink_target_lock ->&base->lock ->device_state_lock FD: 32 BD: 27 ....: device_state_lock ->kernfs_notify_lock FD: 38 BD: 25 ..-.: hcd_root_hub_lock ->hcd_urb_list_lock ->&bh->lock ->&p->pi_lock ->&dev->power.lock ->device_state_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 26 ..-.: hcd_urb_list_lock FD: 1 BD: 26 ..-.: &bh->lock FD: 14 BD: 79 ..-.: lock#6 ->kcov_remote_lock ->&kcov->lock FD: 12 BD: 129 ..-.: kcov_remote_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 27 BD: 21 ..-.: &x->wait#19 ->&p->pi_lock FD: 1 BD: 4 +.+.: set_config_lock FD: 83 BD: 4 +.+.: hcd->bandwidth_mutex 
->devtree_lock ->&obj_hash[i].lock ->&x->wait#9 ->&dev->power.lock ->fs_reclaim ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->hcd_root_hub_lock ->&rq->__lock ->&x->wait#19 ->&____s->seqcount ->&c->lock ->&base->lock ->(&timer.timer) ->&dum_hcd->dum->lock ->&n->list_lock FD: 1 BD: 4 +.+.: &new_driver->dynids.lock FD: 39 BD: 23 ..-.: &dum_hcd->dum->lock ->hcd_root_hub_lock ->hcd_urb_list_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 FD: 84 BD: 11 +.+.: &hub->status_mutex ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->hcd_root_hub_lock ->fs_reclaim ->&dum_hcd->dum->lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&base->lock ->(&timer.timer) ->&vhci_hcd->vhci->lock ->&____s->seqcount ->&c->lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&queue->lock ->&n->list_lock ->remove_cache_srcu ->quarantine_lock FD: 1 BD: 5 +.+.: component_mutex FD: 86 BD: 2 +.+.: (work_completion)(&(&hub->init_work)->work) ->&rq->__lock ->&lock->wait_lock ->&p->pi_lock FD: 1 BD: 4 +.+.: subsys mutex#58 FD: 225 BD: 1 +.+.: (wq_completion)usb_hub_wq ->(work_completion)(&hub->events) FD: 224 BD: 2 +.+.: (work_completion)(&hub->events) ->lock#6 ->&dev->power.lock ->&rq->__lock FD: 39 BD: 3 ..-.: &hub->irq_urb_lock ->hcd_root_hub_lock FD: 1 BD: 3 ....: (&hub->irq_urb_retry) FD: 1 BD: 5 ....: hcd_urb_unlink_lock FD: 27 BD: 3 ..-.: usb_kill_urb_queue.lock ->&p->pi_lock FD: 1 BD: 3 +.+.: (work_completion)(&hub->tt.clear_work) FD: 49 BD: 13 +.+.: udc_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->udc_lock.wait_lock ->&queue->lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 3 BD: 1 +.+.: subsys mutex#59 ->&k->k_lock FD: 1 BD: 1 ....: gadget_id_numbers.xa_lock FD: 103 BD: 2 +.+.: (work_completion)(&gadget->work) ->&root->kernfs_rwsem ->kernfs_notify_lock FD: 31 BD: 189 ..-.: kernfs_notify_lock FD: 61 BD: 2 +.+.: kernfs_notify_work ->kernfs_notify_lock ->&root->kernfs_supers_rwsem ->&rq->__lock FD: 59 BD: 7 ++++: &root->kernfs_supers_rwsem ->inode_hash_lock ->&rq->__lock FD: 1 BD: 1 +.+.: subsys mutex#60 FD: 1 BD: 1 +.+.: func_lock FD: 1 BD: 1 +.+.: g_tf_lock FD: 1 BD: 13 ....: &vhci_hcd->vhci->lock FD: 2 BD: 2 ....: input_ida.xa_lock ->pool_lock#2 FD: 1 BD: 1 ....: &mousedev->mutex/1 FD: 31 BD: 4 ....: serio_event_lock ->pool_lock#2 FD: 59 BD: 1 +.+.: (wq_completion)events_long ->serio_event_work ->(work_completion)(&(&ipvs->defense_work)->work) ->(work_completion)(&(&br->gc_work)->work) ->(work_completion)(&br->mcast_gc_work) ->&rq->__lock FD: 36 BD: 2 +.+.: serio_event_work ->serio_mutex FD: 35 BD: 3 +.+.: serio_mutex ->serio_event_lock ->&k->list_lock ->&k->k_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 ....: rtc_ida.xa_lock FD: 33 BD: 1 +.+.: &rtc->ops_lock ->(efi_runtime_lock).lock ->&obj_hash[i].lock ->&x->wait#12 ->&rq->__lock FD: 1 BD: 2 ....: platform_devid_ida.xa_lock FD: 1 BD: 2 ....: rtcdev_lock FD: 66 BD: 1 +.+.: g_smscore_deviceslock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: cx231xx_devlist_mutex FD: 1 BD: 1 +.+.: em28xx_devlist_mutex FD: 1 BD: 1 ....: pvr2_context_sync_data.lock FD: 1 BD: 10 +.+.: i2c_dev_list_lock FD: 3 BD: 10 +.+.: subsys mutex#61 ->&k->k_lock FD: 1 BD: 1 +.+.: subsys mutex#62 FD: 153 BD: 2 +.+.: dvbdev_register_lock ->(console_sem).lock ->fs_reclaim ->pool_lock#2 ->minor_rwsem ->&xa->xa_lock#13 ->&mdev->graph_mutex ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock 
->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#63 FD: 154 BD: 1 +.+.: frontend_mutex ->fs_reclaim ->pool_lock#2 ->(console_sem).lock ->dvbdev_register_lock FD: 1 BD: 3 +.+.: minor_rwsem FD: 2 BD: 3 ....: &xa->xa_lock#13 ->pool_lock#2 FD: 66 BD: 4 +.+.: &mdev->graph_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount FD: 3 BD: 3 +.+.: subsys mutex#63 ->&k->k_lock FD: 1 BD: 1 ....: &dmxdev->lock FD: 1 BD: 1 +.+.: &dvbdemux->mutex FD: 1 BD: 1 +.+.: media_devnode_lock FD: 1 BD: 1 +.+.: subsys mutex#64 FD: 1 BD: 1 +.+.: videodev_lock FD: 3 BD: 1 +.+.: subsys mutex#65 ->&k->k_lock FD: 1 BD: 1 +.+.: vimc_sensor:396:(&vsensor->hdl)->_lock FD: 1 BD: 1 +.+.: &v4l2_dev->lock FD: 1 BD: 1 +.+.: vimc_debayer:581:(&vdebayer->hdl)->_lock FD: 1 BD: 1 +.+.: vimc_lens:61:(&vlens->hdl)->_lock FD: 76 BD: 1 +.+.: vivid_ctrls:1606:(hdl_user_gen)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock ->vivid_ctrls:1627:(hdl_vbi_out)->_lock ->vivid_ctrls:1630:(hdl_radio_rx)->_lock ->vivid_ctrls:1632:(hdl_radio_tx)->_lock ->vivid_ctrls:1634:(hdl_sdr_cap)->_lock ->vivid_ctrls:1636:(hdl_meta_cap)->_lock ->vivid_ctrls:1638:(hdl_meta_out)->_lock ->vivid_ctrls:1640:(hdl_tch_cap)->_lock ->&zone->lock ->&obj_hash[i].lock FD: 67 BD: 1 +.+.: vivid_ctrls:1608:(hdl_user_vid)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 FD: 70 BD: 1 +.+.: vivid_ctrls:1610:(hdl_user_aud)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->vivid_ctrls:1630:(hdl_radio_rx)->_lock ->vivid_ctrls:1632:(hdl_radio_tx)->_lock FD: 74 BD: 1 +.+.: vivid_ctrls:1612:(hdl_streaming)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->&c->lock ->&____s->seqcount ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock ->vivid_ctrls:1627:(hdl_vbi_out)->_lock ->vivid_ctrls:1634:(hdl_sdr_cap)->_lock ->vivid_ctrls:1636:(hdl_meta_cap)->_lock ->vivid_ctrls:1638:(hdl_meta_out)->_lock ->vivid_ctrls:1640:(hdl_tch_cap)->_lock FD: 68 BD: 1 +.+.: vivid_ctrls:1614:(hdl_sdtv_cap)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 68 BD: 1 +.+.: vivid_ctrls:1616:(hdl_loop_cap)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 1 BD: 1 +.+.: vivid_ctrls:1618:(hdl_fb)->_lock FD: 1 BD: 7 +.+.: vivid_ctrls:1620:(hdl_vid_cap)->_lock FD: 1 BD: 4 +.+.: vivid_ctrls:1622:(hdl_vid_out)->_lock FD: 1 BD: 5 +.+.: vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1627:(hdl_vbi_out)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1630:(hdl_radio_rx)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1632:(hdl_radio_tx)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1634:(hdl_sdr_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1636:(hdl_meta_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1638:(hdl_meta_out)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1640:(hdl_tch_cap)->_lock FD: 1 BD: 1 ....: &adap->kthread_waitq FD: 1 BD: 1 +.+.: &dev->cec_xfers_slock FD: 1 BD: 1 ....: &dev->kthread_waitq_cec FD: 1 BD: 1 +.+.: cec_devnode_lock FD: 1 BD: 1 
+.+.: subsys mutex#66 FD: 5 BD: 1 +.+.: &adap->lock ->tk_core.seq.seqcount ->&adap->devnode.lock_fhs FD: 1 BD: 2 +.+.: &adap->devnode.lock_fhs FD: 1 BD: 1 ....: ptp_clocks_map.xa_lock FD: 3 BD: 1 +.+.: subsys mutex#67 ->&k->k_lock FD: 1 BD: 1 +.+.: pers_lock FD: 1 BD: 1 +.+.: _lock FD: 1 BD: 3 +.+.: dm_bufio_clients_lock FD: 1 BD: 1 +.+.: _ps_lock FD: 1 BD: 1 +.+.: _lock#2 FD: 1 BD: 1 +.+.: _lock#3 FD: 1 BD: 1 +.+.: register_lock#2 FD: 3 BD: 1 +.+.: subsys mutex#68 ->&k->k_lock FD: 1 BD: 1 .+.+: bp_lock FD: 3 BD: 1 +.+.: subsys mutex#69 ->&k->k_lock FD: 16 BD: 1 +.-.: (&dsp_spl_tl) ->dsp_lock FD: 15 BD: 2 ..-.: dsp_lock ->iclock_lock ->&obj_hash[i].lock ->&base->lock FD: 4 BD: 3 ...-: iclock_lock ->tk_core.seq.seqcount FD: 67 BD: 52 +.+.: lock#7 ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#15 FD: 1 BD: 1 ....: iscsi_transport_lock FD: 3 BD: 1 +.+.: subsys mutex#70 ->&k->k_lock FD: 1 BD: 1 ....: &tx_task->waiting FD: 1 BD: 1 +.+.: link_ops_rwsem FD: 165 BD: 1 +.+.: disable_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->&(&priv->bus_notifier)->rwsem ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#4 FD: 2 BD: 1 +.+.: protocol_lock ->pool_lock#2 FD: 89 BD: 1 +.+.: psinfo_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->(console_sem).lock ->pstore_sb_lock ->dump_list_lock ->&rq->__lock FD: 75 BD: 3 +.+.: pstore_sb_lock ->&sb->s_type->i_mutex_key#12 FD: 1 BD: 2 ....: dump_list_lock FD: 1 BD: 1 +.+.: vsock_register_mutex FD: 1 BD: 1 +.+.: comedi_drivers_list_lock FD: 132 BD: 1 +.+.: cscfg_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->&rq->__lock ->&cfs_rq->removed.lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx FD: 1 BD: 555 +.+.: icc_bw_lock FD: 3 BD: 6 +.+.: subsys mutex#71 ->&k->k_lock FD: 136 BD: 2 ++++: snd_ctl_layer_rwsem ->snd_ctl_led_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->&k->k_lock ->sysfs_symlink_target_lock FD: 1 BD: 3 +.+.: snd_card_mutex FD: 1 BD: 1 +.+.: snd_ioctl_rwsem FD: 66 BD: 2 +.+.: strings ->fs_reclaim ->&____s->seqcount ->pool_lock#2 FD: 1 BD: 2 +.+.: register_mutex FD: 149 BD: 3 +.+.: sound_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&rq->__lock ->&x->wait#11 ->&obj_hash[i].lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#71 ->&k->k_lock FD: 159 BD: 1 +.+.: register_mutex#2 ->fs_reclaim ->pool_lock#2 ->sound_mutex ->&obj_hash[i].lock ->register_mutex ->&c->lock ->&____s->seqcount ->sound_oss_mutex ->strings ->&entry->access ->info_mutex FD: 151 BD: 1 +.+.: register_mutex#3 ->fs_reclaim ->pool_lock#2 ->sound_mutex ->&c->lock ->&____s->seqcount ->clients_lock FD: 1 BD: 5 ....: clients_lock FD: 2 BD: 1 +.+.: &client->ports_mutex ->&client->ports_lock FD: 1 BD: 5 .+.+: &client->ports_lock FD: 152 BD: 1 +.+.: register_mutex#4 ->fs_reclaim ->pool_lock#2 ->sound_oss_mutex FD: 151 BD: 3 +.+.: sound_oss_mutex ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 
->sound_loader_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#71 ->&k->k_lock FD: 1 BD: 4 +.+.: sound_loader_lock FD: 69 BD: 1 .+.+: &grp->list_mutex/1 ->clients_lock ->&client->ports_lock ->register_lock#3 ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 2 BD: 1 +.+.: &grp->list_mutex#2 ->&grp->list_lock FD: 1 BD: 2 ....: &grp->list_lock FD: 1 BD: 4 ....: register_lock#3 FD: 139 BD: 1 ++++: &card->controls_rwsem ->&xa->xa_lock#14 ->fs_reclaim ->&card->ctl_files_rwlock ->snd_ctl_layer_rwsem ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 9 BD: 2 +.+.: &xa->xa_lock#14 ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 1 BD: 2 ....: &card->ctl_files_rwlock FD: 78 BD: 2 +.+.: async_lookup_work ->fs_reclaim ->pool_lock#2 ->clients_lock ->&client->ports_lock ->register_lock#3 ->snd_card_mutex ->autoload_work ->&x->wait#10 ->&rq->__lock ->&obj_hash[i].lock FD: 4 BD: 3 +.+.: autoload_work ->&k->list_lock ->&k->k_lock FD: 1 BD: 3 +.+.: snd_ctl_led_mutex FD: 1 BD: 1 +.+.: register_mutex#5 FD: 67 BD: 1 +.+.: client_mutex ->fs_reclaim ->pool_lock#2 ->&dev->devres_lock FD: 1 BD: 56 +.+.: failover_lock FD: 2 BD: 2 +...: llc_sap_list_lock ->pool_lock#2 FD: 66 BD: 1 +.+.: act_id_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: act_mod_lock FD: 1 BD: 1 +.+.: ife_mod_lock FD: 1 BD: 1 +.+.: cls_mod_lock FD: 1 BD: 1 +.+.: ematch_mod_lock FD: 1 BD: 1 +.+.: sock_diag_table_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_acct FD: 1 BD: 1 +.+.: nfnl_subsys_queue FD: 1 BD: 1 +.+.: nfnl_subsys_ulog FD: 26 BD: 5 +.+.: nf_log_mutex ->&rq->__lock FD: 26 BD: 1 +.+.: nfnl_subsys_osf ->&rq->__lock FD: 34 BD: 1 +.+.: nf_sockopt_mutex ->&rq->__lock ->nf_sockopt_mutex.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 75 BD: 1 +.+.: nfnl_subsys_ctnetlink ->fs_reclaim ->&c->lock ->&n->list_lock ->pool_lock#2 ->&obj_hash[i].lock ->rlock-AF_NETLINK ->remove_cache_srcu ->&rq->__lock ->nlk_cb_mutex-NETFILTER ->&lock->wait_lock ->nf_conntrack_mutex FD: 78 BD: 1 +.+.: nfnl_subsys_ctnetlink_exp ->nlk_cb_mutex-NETFILTER ->rlock-AF_NETLINK ->nf_conntrack_expect_lock FD: 1 BD: 5 +.+.: nf_ct_ecache_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_cttimeout FD: 43 BD: 1 +.+.: nfnl_subsys_cthelper ->(console_sem).lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&lock->wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: nf_ct_helper_mutex FD: 1 BD: 2 +...: nf_conntrack_expect_lock FD: 1 BD: 1 +.+.: nf_ct_nat_helpers_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_nftables FD: 1 BD: 1 +.+.: nfnl_subsys_nftcompat FD: 871 BD: 1 +.+.: masq_mutex ->pernet_ops_rwsem ->(inetaddr_chain).rwsem ->inet6addr_chain.lock FD: 213 BD: 5 +.+.: &xt[i].mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&mm->mmap_lock ->free_vmap_area_lock ->vmap_area_lock ->&per_cpu(xt_recseq, i) ->&obj_hash[i].lock ->purge_vmap_area_lock ->&rq->__lock ->init_mm.page_table_lock ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&lock->wait_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->quarantine_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&base->lock ->pool_lock FD: 29 BD: 71 +.+.: &tn->lock 
->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 3 BD: 1 +.+.: subsys mutex#72 ->&k->k_lock FD: 26 BD: 5 +.+.: nfnl_subsys_ipset ->&rq->__lock FD: 1 BD: 1 +.+.: ip_set_type_mutex FD: 76 BD: 5 +.+.: ipvs->est_mutex ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->&c->lock ->pcpu_lock ->&obj_hash[i].lock ->&n->list_lock ->&rq->__lock ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 1 +.+.: ip_vs_sched_mutex FD: 64 BD: 5 +.+.: __ip_vs_app_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock ->remove_cache_srcu ->&n->list_lock FD: 1 BD: 1 +.+.: ip_vs_pe_mutex FD: 1 BD: 1 +.+.: tunnel4_mutex FD: 1 BD: 1 +.+.: xfrm4_protocol_mutex FD: 1 BD: 1 +.+.: inet_diag_table_mutex FD: 1 BD: 1 +...: xfrm_km_lock FD: 1 BD: 1 +.+.: xfrm6_protocol_mutex FD: 1 BD: 1 +.+.: tunnel6_mutex FD: 1 BD: 1 +.+.: xfrm_if_cb_lock FD: 1 BD: 1 +...: inetsw6_lock FD: 1 BD: 6 +.+.: &hashinfo->lock#2 FD: 17 BD: 5 +.+.: &net->ipv6.ip6addrlbl_table.lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 151 BD: 59 +.+.: &idev->mc_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&dev_addr_list_lock_key ->&c->lock ->&____s->seqcount ->_xmit_ETHER ->batched_entropy_u32.lock ->&base->lock ->&n->list_lock ->remove_cache_srcu ->krc.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&bridge_netdev_addr_lock_key ->&dev_addr_list_lock_key#2 ->&batadv_netdev_addr_lock_key ->&rq->__lock ->&vlan_netdev_addr_lock_key ->&macvlan_netdev_addr_lock_key ->&dev_addr_list_lock_key#3 ->&bridge_netdev_addr_lock_key/1 ->&dev_addr_list_lock_key/1 ->rcu_node_0 ->&dev_addr_list_lock_key#2/1 ->_xmit_ETHER/1 ->&batadv_netdev_addr_lock_key/1 ->&vlan_netdev_addr_lock_key/1 ->&macvlan_netdev_addr_lock_key/1 ->&dev_addr_list_lock_key#3/1 ->&macsec_netdev_addr_lock_key/1 ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&lock->wait_lock FD: 9 BD: 60 +...: &dev_addr_list_lock_key ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&n->list_lock FD: 43 BD: 72 +...: _xmit_ETHER ->&c->lock ->&local->filter_lock ->&rdev->wiphy_work_lock ->pool_lock#2 ->&____s->seqcount ->(console_sem).lock ->console_owner_lock ->console_owner ->&____s->seqcount#2 ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 714 BD: 1 +.+.: (wq_completion)ipv6_addrconf ->(work_completion)(&(&net->ipv6.addr_chk_work)->work) ->(work_completion)(&(&ifa->dad_work)->work) ->&rq->__lock FD: 712 BD: 6 +.+.: (work_completion)(&(&net->ipv6.addr_chk_work)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 27 BD: 6 ....: &x->wait#20 ->&p->pi_lock FD: 48 BD: 93 ++--: &ndev->lock ->&ifa->lock ->pool_lock#2 ->&dir->lock#2 ->pcpu_lock ->&obj_hash[i].lock ->&____s->seqcount ->&tb->tb6_lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u32.lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 3 BD: 1 +.+.: stp_proto_mutex ->llc_sap_list_lock FD: 1 BD: 1 ....: switchdev_notif_chain.lock FD: 26 BD: 52 ++++: (switchdev_blocking_notif_chain).rwsem ->&rq->__lock FD: 712 BD: 1 +.+.: br_ioctl_mutex ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock FD: 256 BD: 8 +.+.: nf_ct_proto_mutex ->defrag4_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->defrag6_mutex ->&rq->__lock ->&cfs_rq->removed.lock FD: 212 BD: 5 +.+.: ebt_mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&mm->mmap_lock ->&c->lock ->&____s->seqcount#2 
->stock_lock ->rcu_node_0 ->&rq->__lock ->ebt_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&n->list_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 1 +.+.: dsa_tag_drivers_lock FD: 1 BD: 1 +...: protocol_list_lock FD: 1 BD: 1 +...: linkfail_lock FD: 1 BD: 4 +...: rose_neigh_list_lock FD: 1 BD: 1 +.+.: proto_tab_lock#2 FD: 1 BD: 25 ++++: chan_list_lock FD: 1 BD: 2 +.+.: l2cap_sk_list.lock FD: 3 BD: 1 +.+.: sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP ->slock-AF_BLUETOOTH-BTPROTO_L2CAP ->chan_list_lock FD: 1 BD: 2 +...: slock-AF_BLUETOOTH-BTPROTO_L2CAP FD: 1 BD: 1 ....: rfcomm_wq.lock FD: 1 BD: 1 +.+.: rfcomm_mutex FD: 1 BD: 1 +.+.: auth_domain_lock FD: 1 BD: 1 +.+.: registered_mechs_lock FD: 1 BD: 1 ....: atm_dev_notify_chain.lock FD: 1 BD: 1 +.+.: proto_tab_lock#3 FD: 712 BD: 1 +.+.: vlan_ioctl_mutex ->&mm->mmap_lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&sem->wait_lock FD: 1 BD: 1 +.+.: rds_info_lock FD: 40 BD: 2 ++++: rds_trans_sem ->(console_sem).lock FD: 1 BD: 1 ....: &id_priv->lock FD: 2 BD: 53 +.+.: &xa->xa_lock#15 ->pool_lock#2 FD: 49 BD: 55 +.+.: k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock ->&table->hash[i].lock ->&rq->__lock ->k-clock-AF_INET6 ->&icsk->icsk_accept_queue.rskq_lock#2 ->&obj_hash[i].lock FD: 27 BD: 57 +...: k-slock-AF_INET6 ->pool_lock#2 ->&obj_hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->key#24 FD: 1 BD: 87 ++..: k-clock-AF_INET6 FD: 17 BD: 78 +.-.: &tcp_hashinfo.bhash[i].lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&tcp_hashinfo.bhash2[i].lock ->k-clock-AF_INET6 ->clock-AF_INET ->clock-AF_INET6 ->&obj_hash[i].lock ->stock_lock FD: 16 BD: 79 +.-.: &tcp_hashinfo.bhash2[i].lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->k-clock-AF_INET6 ->clock-AF_INET ->clock-AF_INET6 ->&obj_hash[i].lock ->batched_entropy_u8.lock ->&hashinfo->ehash_locks[i] ->stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 70 +.+.: &h->lhash2[i].lock FD: 1 BD: 5 +...: &list->lock#4 FD: 1 BD: 6 +...: k-clock-AF_TIPC FD: 35 BD: 5 +.+.: k-sk_lock-AF_TIPC ->k-slock-AF_TIPC ->&tn->nametbl_lock ->&obj_hash[i].lock ->k-clock-AF_TIPC ->&rq->__lock FD: 1 BD: 6 +...: k-slock-AF_TIPC FD: 19 BD: 6 +...: &tn->nametbl_lock ->pool_lock#2 ->&service->lock ->&c->lock ->&____s->seqcount ->&nt->cluster_scope_lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 17 BD: 7 +...: &service->lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 26 BD: 56 +.+.: &pnettable->lock ->&rq->__lock FD: 26 BD: 56 +.+.: smc_ib_devices.mutex ->&rq->__lock FD: 1 BD: 1 +.+.: smc_wr_rx_hash_lock FD: 1 BD: 1 +.+.: v9fs_trans_lock FD: 1 BD: 5 +...: &this->receive_lock FD: 1 BD: 1 +...: lowpan_nhc_lock FD: 263 BD: 7 +.+.: ovs_mutex ->(work_completion)(&data->gc_work) ->nf_ct_proto_mutex ->&obj_hash[i].lock ->pool_lock#2 ->net_rwsem ->quarantine_lock ->&rq->__lock FD: 254 BD: 9 +.+.: defrag4_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 254 BD: 9 +.+.: defrag6_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 40 BD: 2 +.+.: drain_vmap_work ->vmap_purge_lock FD: 1 BD: 6 +.+.: ima_keys_lock FD: 69 BD: 100 +.+.: scomp_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock FD: 6 BD: 544 +.+.: &mm->page_table_lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock FD: 50 BD: 544 +.+.: ptlock_ptr(ptdesc)#2 ->lock#4 ->key ->&____s->seqcount ->lock#5 
->&folio_wait_table[i] ->&obj_hash[i].lock ->&((cluster_info + ci)->lock)#2 ->mmlist_lock ->&xa->xa_lock#23 ->&cache->free_lock ->&lruvec->lru_lock FD: 288 BD: 5 +.+.: k-sk_lock-AF_RXRPC ->k-slock-AF_RXRPC ->&rxnet->local_mutex ->&local->services_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&rx->incoming_lock ->&obj_hash[i].lock ->&rxnet->conn_lock ->&call->waitq ->(rxrpc_call_limiter).lock ->&rx->recvmsg_lock ->&rx->call_lock ->&rxnet->call_lock ->(&call->timer) ->&base->lock ->&list->lock#17 ->quarantine_lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&n->list_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 6 +...: k-slock-AF_RXRPC FD: 275 BD: 6 +.+.: &rxnet->local_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->crngs.lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&c->lock ->&dir->lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->cpu_hotplug_lock ->&rq->__lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&x->wait#21 ->&n->list_lock ->&cfs_rq->removed.lock ->stock_lock ->&____s->seqcount#2 ->key ->pcpu_lock ->percpu_counters_lock ->&table->hash[i].lock ->k-clock-AF_INET6 ->&xa->xa_lock#9 ->&fsnotify_mark_srcu FD: 6 BD: 71 +...: &table->hash[i].lock ->k-clock-AF_INET6 ->&table->hash2[i].lock ->k-clock-AF_INET ->clock-AF_INET ->clock-AF_INET6 FD: 1 BD: 72 +...: &table->hash2[i].lock FD: 256 BD: 2 +.+.: netstamp_work ->cpu_hotplug_lock FD: 27 BD: 7 ....: &x->wait#21 ->&p->pi_lock FD: 1 BD: 6 +.+.: &local->services_lock FD: 1 BD: 8 +.+.: &rxnet->conn_lock FD: 1 BD: 6 ....: &call->waitq FD: 1 BD: 6 +.+.: &rx->call_lock FD: 1 BD: 6 +.+.: &rxnet->call_lock FD: 31 BD: 5 +.-.: (&rxnet->peer_keepalive_timer) FD: 74 BD: 1 +.+.: init_user_ns.keyring_sem ->key_user_lock ->root_key_user.lock ->fs_reclaim ->pool_lock#2 ->crngs.lock ->key_serial_lock ->key_construction_mutex ->&type->lock_class ->keyring_serialise_link_lock FD: 1 BD: 5 +.+.: root_key_user.lock FD: 30 BD: 5 +.+.: (wq_completion)krxrpcd ->(work_completion)(&rxnet->peer_keepalive_work) ->(work_completion)(&rxnet->service_conn_reaper) ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 27 BD: 6 +.+.: (work_completion)(&rxnet->peer_keepalive_work) ->&rxnet->peer_hash_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 1 BD: 7 +.+.: &rxnet->peer_hash_lock FD: 1 BD: 8 +.+.: keyring_name_lock FD: 1 BD: 1 +.+.: template_list FD: 1 BD: 1 +.+.: idr_lock FD: 66 BD: 1 +.+.: ima_extend_list_mutex ->fs_reclaim ->pool_lock#2 FD: 27 BD: 4 +.+.: deferred_probe_work ->deferred_probe_mutex ->&rq->__lock FD: 26 BD: 52 +.+.: &(&net->nexthop.notifier_chain)->rwsem ->&rq->__lock FD: 42 BD: 53 +.+.: k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->&table->hash[i].lock ->&obj_hash[i].lock ->k-clock-AF_INET ->&rq->__lock FD: 24 BD: 54 +...: k-slock-AF_INET#2 ->pool_lock#2 ->&____s->seqcount ->batched_entropy_u32.lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount#2 ->&n->list_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 72 ++..: k-clock-AF_INET FD: 1 BD: 1 ....: power_off_handler_list.lock FD: 712 BD: 2 +.+.: reg_work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 52 +...: reg_pending_beacons_lock FD: 724 BD: 2 +.+.: (work_completion)(&fw_work->work) ->fs_reclaim ->pool_lock#2 ->&fw_cache.lock ->&c->lock ->&____s->seqcount ->tk_core.seq.seqcount ->async_lock ->init_task.alloc_lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key 
->&obj_hash[i].lock ->(console_sem).lock ->umhelper_sem ->fw_lock ->rtnl_mutex FD: 2 BD: 3 +.+.: &fw_cache.lock ->pool_lock#2 FD: 2 BD: 406 +.+.: &____s->seqcount#6 ->&____s->seqcount#6/1 FD: 3 BD: 4 +.+.: subsys mutex#73 ->&k->k_lock FD: 37 BD: 17 +.+.: fw_lock ->dpm_list_mtx ->&x->wait#22 ->dpm_list_mtx.wait_lock ->&p->pi_lock ->&rq->__lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 1 BD: 18 ....: &x->wait#22 FD: 1 BD: 1 +.+.: detector_work FD: 1 BD: 1 +.+.: acpi_gpio_deferred_req_irqs_lock FD: 1 BD: 1 ....: enable_lock FD: 1 BD: 1 +.+.: gpd_list_lock FD: 1 BD: 1 +.+.: cdev_lock FD: 291 BD: 3 +.+.: &tty->legacy_mutex ->&tty->read_wait ->&tty->write_wait ->&tty->ldisc_sem ->&tty->files_lock ->&port->lock ->&port->mutex ->&port_lock_key ->&f->f_lock ->&obj_hash[i].lock ->pool_lock#2 ->tasklist_lock FD: 1 BD: 8 ....: &tty->read_wait FD: 27 BD: 284 ....: &tty->write_wait ->&p->pi_lock FD: 283 BD: 4 ++++: &tty->ldisc_sem ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&tty->write_wait ->&tty->read_wait ->&tty->termios_rwsem ->&mm->mmap_lock ->&port_lock_key ->&port->lock ->&tty->flow.lock ->&ldata->atomic_read_lock FD: 181 BD: 7 ++++: &tty->termios_rwsem ->&port->mutex ->&tty->write_wait ->&tty->read_wait ->&ldata->output_lock ->&port_lock_key FD: 1 BD: 6 +.+.: &tty->files_lock FD: 1 BD: 284 ....: &port->lock FD: 27 BD: 404 ....: &wq#2 ->&p->pi_lock FD: 410 BD: 1 +.+.: &type->s_umount_key#24/1 ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->&obj_hash[i].lock ->&wq->mutex ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->wq_pool_mutex ->mmu_notifier_invalidate_range_start ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->wq_mayday_lock ->&sbi->old_work_lock ->(work_completion)(&(&sbi->old_work)->work) ->&x->wait#23 FD: 26 BD: 13 +.+.: &bdev->bd_holder_lock ->&rq->__lock FD: 27 BD: 293 -.-.: bit_wait_table + i ->&p->pi_lock FD: 80 BD: 114 ++++: &ei->xattr_sem ->mmu_notifier_invalidate_range_start ->lock#4 ->&mapping->i_private_lock ->pool_lock#2 ->&ret->b_state_lock ->&journal->j_revoke_lock ->tk_core.seq.seqcount ->&ei->i_raw_lock ->&____s->seqcount ->&xa->xa_lock#9 ->stock_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&c->lock ->bit_wait_table + i ->&n->list_lock FD: 1 BD: 2 +.+.: &sbi->old_work_lock FD: 1 BD: 2 +.+.: (work_completion)(&(&sbi->old_work)->work) FD: 1 BD: 9 ....: &x->wait#23 FD: 412 BD: 1 +.+.: &type->s_umount_key#25/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->crypto_alg_sem ->lock#2 ->&lruvec->lru_lock ->&x->wait#23 FD: 417 BD: 1 +.+.: &type->s_umount_key#26/1 ->fs_reclaim ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->mmu_notifier_invalidate_range_start ->&xa->xa_lock#9 ->lock#4 
->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->&lruvec->lru_lock ->crypto_alg_sem ->pool_lock#2 ->percpu_counters_lock ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_mutex_key#8 ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&journal->j_state_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&journal->j_wait_done_commit ->&p->alloc_lock ->cpu_hotplug_lock ->wq_pool_mutex ->&ei->i_es_lock ->ext4_grpinfo_slab_create_mutex ->&s->s_inode_list_lock ->ext4_li_mtx ->lock ->&root->kernfs_rwsem ->(console_sem).lock ->&dentry->d_lock FD: 33 BD: 116 +.+.: &bgl->locks[i].lock ->&sbi->s_md_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&ei->i_prealloc_lock ->&base->lock ->key#27 FD: 52 BD: 278 +.+.: &sb->s_type->i_lock_key#22 ->&dentry->d_lock ->&lru->node[i].lock ->&xa->xa_lock#9 ->bit_wait_table + i FD: 221 BD: 5 ++++: &sb->s_type->i_mutex_key#8 ->&ei->i_es_lock ->&ei->i_data_sem ->&ei->xattr_sem ->tk_core.seq.seqcount ->&mm->mmap_lock ->fs_reclaim ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&mapping->i_private_lock ->&sb->s_type->i_lock_key#22 ->&wb->list_lock ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&c->lock ->&rq->__lock ->rcu_node_0 ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->mapping.invalidate_lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->swap_cgroup_mutex ->&fq->mq_flush_lock ->&x->wait#26 ->&base->lock ->(&timer.timer) ->&((cluster_info + ci)->lock)/1 ->swapon_mutex ->proc_poll_wait.lock ->&dentry->d_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&n->list_lock ->&meta->lock ->&rcu_state.expedited_wq ->quarantine_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&sem->wait_lock ->&p->pi_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 21 BD: 115 ++++: &ei->i_es_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&sbi->s_es_lock ->&obj_hash[i].lock ->key#2 ->key#6 ->key#7 ->key#8 ->&____s->seqcount#2 ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock ->&base->lock FD: 101 BD: 114 ++++: &ei->i_data_sem ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&ei->i_es_lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&ei->i_prealloc_lock ->&sb->s_type->i_lock_key#22 ->&(ei->i_block_reservation_lock) ->&lg->lg_mutex ->&mapping->i_private_lock ->&ei->i_raw_lock ->&rq->__lock ->&wb->list_lock ->lock#4 ->&ret->b_state_lock ->&journal->j_revoke_lock ->key#15 ->&sbi->s_md_lock ->key#3 ->&____s->seqcount#2 ->remove_cache_srcu ->quarantine_lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&n->list_lock ->&bgl->locks[i].lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock ->&wb->work_lock ->&pa->pa_lock#2 ->&xa->xa_lock#9 ->&sem->wait_lock ->bit_wait_table + i FD: 1 BD: 116 +.+.: &sbi->s_es_lock FD: 70 BD: 115 ++++: &journal->j_state_lock ->&journal->j_wait_done_commit ->&journal->j_wait_commit ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->&journal->j_wait_updates ->&journal->j_wait_transaction_locked ->&journal->j_list_lock ->&journal->j_wait_reserved FD: 27 BD: 116 ....: &journal->j_wait_done_commit ->&p->pi_lock FD: 27 BD: 116 ....: &journal->j_wait_commit ->&p->pi_lock FD: 134 BD: 2 +.+.: 
ext4_grpinfo_slab_create_mutex ->slab_mutex FD: 70 BD: 3 +.+.: ext4_li_mtx ->fs_reclaim ->pool_lock#2 ->batched_entropy_u16.lock ->&eli->li_list_mtx ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock FD: 1 BD: 1 ....: &rs->lock FD: 207 BD: 6 ++++: &type->i_mutex_dir_key#3 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&ei->i_es_lock ->&ei->i_data_sem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->inode_hash_lock ->&obj_hash[i].lock ->&journal->j_state_lock ->&sb->s_type->i_lock_key#22 ->&ei->xattr_sem ->&c->lock ->namespace_sem ->tomoyo_ss ->&s->s_inode_list_lock ->jbd2_handle ->&mm->mmap_lock ->quarantine_lock ->&n->list_lock ->remove_cache_srcu ->&____s->seqcount#2 ->rcu_node_0 ->&sem->wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&base->lock ->&meta->lock ->&rcu_state.expedited_wq ->rename_lock ->&dentry->d_lock/1 ->smack_known_lock ->&lruvec->lru_lock FD: 40 BD: 54 +.+.: rcu_state.barrier_mutex ->rcu_state.barrier_lock ->&x->wait#24 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_state.barrier_mutex.wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 27 BD: 55 ..-.: &x->wait#24 ->&p->pi_lock FD: 199 BD: 102 ++++: &vma->vm_lock->lock ->ptlock_ptr(ptdesc)#2 ->fs_reclaim ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->mapping.invalidate_lock ->pool_lock#2 ->&rq->__lock ->rcu_node_0 ->&c->lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rcu_state.gp_wq ->&folio_wait_table[i] ->remove_cache_srcu ->&lruvec->lru_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&mm->page_table_lock ->&rcu_state.expedited_wq ->stock_lock ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->tk_core.seq.seqcount ->mount_lock ->&cfs_rq->removed.lock ->&n->list_lock ->&____s->seqcount#2 ->&base->lock ->&((cluster_info + ci)->lock)#2 ->cgroup_file_kn_lock ->&vmpr->sr_lock ->lock#5 ->&mapping->i_private_lock ->cgroup_rstat_lock ->&xa->xa_lock#23 ->&ctrl->lock#2 ->&tree->lock ->&acomp_ctx->mutex ->&cache->free_lock ->&cache->alloc_lock ->memcg_oom_lock ->&sb->s_type->i_lock_key#3 ->&rtpn->lock FD: 62 BD: 543 ++++: &anon_vma->rwsem ->&mm->page_table_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->mmu_notifier_invalidate_range_start ->ptlock_ptr(ptdesc)#2 ->quarantine_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->stock_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->&sem->wait_lock ->batched_entropy_u8.lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 224 BD: 1 +.+.: &sig->cred_guard_mutex ->fs_reclaim ->pool_lock#2 ->init_fs.lock ->&p->pi_lock ->mapping.invalidate_lock ->&folio_wait_table[i] ->&rq->__lock ->tomoyo_ss ->binfmt_lock ->init_binfmt_misc.entries_lock ->&dentry->d_lock ->&type->i_mutex_dir_key#3 ->&sb->s_type->i_lock_key#22 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&ei->xattr_sem ->&tsk->futex_exit_mutex ->&sig->exec_update_lock ->&fs->lock ->lock#4 ->&sb->s_type->i_mutex_key#8 ->&p->alloc_lock ->tk_core.seq.seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&mm->mmap_lock ->&stopper->lock 
->&stop_pi_lock ->&x->wait#8 ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&n->list_lock ->remove_cache_srcu ->key#5 ->rcu_node_0 ->quarantine_lock ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->stock_lock ->pool_lock FD: 151 BD: 104 ++++: mapping.invalidate_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&ei->i_es_lock ->&ei->i_data_sem ->&c->lock ->pool_lock#2 ->tk_core.seq.seqcount ->&folio_wait_table[i] ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&obj_hash[i].lock ->&n->list_lock ->&mapping->i_mmap_rwsem ->&journal->j_state_lock ->jbd2_handle FD: 1 BD: 4 ++++: init_binfmt_misc.entries_lock FD: 212 BD: 2 +.+.: &sig->exec_update_lock ->&p->alloc_lock ->&sighand->siglock ->&newf->file_lock ->batched_entropy_u64.lock ->&mm->mmap_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&rq->__lock ->pool_lock ->stock_lock ->&cfs_rq->removed.lock ->quarantine_lock ->&sem->wait_lock ->&p->pi_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 3 BD: 25 ..-.: batched_entropy_u16.lock ->crngs.lock FD: 1 BD: 545 +.+.: ptlock_ptr(ptdesc)#2/1 FD: 31 BD: 1 ..-.: &(&gc_work->dwork)->timer FD: 39 BD: 2 +.+.: (work_completion)(&(&gc_work->dwork)->work) ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->pool_lock#2 FD: 1 BD: 128 ...-: &____s->seqcount#7 FD: 31 BD: 1 ..-.: &(&ipvs->defense_work)->timer FD: 33 BD: 6 +.+.: (work_completion)(&(&ipvs->defense_work)->work) ->&s->s_inode_list_lock ->&ipvs->dropentry_lock ->&ipvs->droppacket_lock ->&ipvs->securetcp_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 7 +...: &ipvs->dropentry_lock FD: 1 BD: 7 +...: &ipvs->droppacket_lock FD: 1 BD: 7 +...: &ipvs->securetcp_lock FD: 1 BD: 116 ....: key#2 FD: 1 BD: 17 ..-.: task_group_lock FD: 136 BD: 1 +.+.: &type->s_umount_key#27/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#9 ->&dentry->d_lock FD: 39 BD: 155 +.+.: &sb->s_type->i_lock_key#23 ->&dentry->d_lock ->&lru->node[i].lock ->&p->pi_lock ->bit_wait_table + i FD: 128 BD: 4 ++++: &sb->s_type->i_mutex_key#9 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->rename_lock.seqcount ->proc_subdir_lock ->&p->alloc_lock ->&pid->lock ->sysctl_lock ->&____s->seqcount ->namespace_sem ->&c->lock ->tomoyo_ss ->remove_cache_srcu ->&n->list_lock ->&____s->seqcount#2 ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->rcu_node_0 ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->key ->pcpu_lock ->percpu_counters_lock FD: 760 BD: 2 +.+.: &p->lock ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&c->lock ->&____s->seqcount ->file_systems_lock ->namespace_sem ->&of->mutex ->remove_cache_srcu ->&n->list_lock ->rcu_node_0 ->&rq->__lock ->module_mutex ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->stock_lock ->sync_timeline_list_lock ->sync_file_list_lock ->binder_dead_nodes_lock 
->binder_procs_lock FD: 113 BD: 1 +.+.: &type->s_umount_key#28/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#24 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock FD: 39 BD: 277 +.+.: &sb->s_type->i_lock_key#24 ->&dentry->d_lock ->&lru->node[i].lock ->bit_wait_table + i ->&p->pi_lock FD: 222 BD: 3 ++++: &type->i_mutex_dir_key#4 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&root->kernfs_rwsem ->mmu_notifier_invalidate_range_start ->iattr_mutex ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&sb->s_type->i_lock_key#24 ->namespace_sem ->&mm->mmap_lock ->vmap_area_lock ->tk_core.seq.seqcount ->quarantine_lock ->remove_cache_srcu ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rq->__lock ->rcu_node_0 ->rename_lock ->&sem->wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq ->&base->lock ->&meta->lock FD: 66 BD: 204 +.+.: iattr_mutex ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->tk_core.seq.seqcount ->&rq->__lock FD: 81 BD: 1 +.+.: &type->s_umount_key#29 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 FD: 27 BD: 153 ....: &x->wait#25 ->&p->pi_lock FD: 39 BD: 8 +.+.: &net->unx.table.locks[i] ->&net->unx.table.locks[i]/1 FD: 857 BD: 1 +.+.: &sb->s_type->i_mutex_key#10 ->&net->unx.table.locks[i] ->&u->lock ->&u->peer_wait ->rlock-AF_UNIX ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->sk_lock-AF_INET ->slock-AF_INET#2 ->clock-AF_INET ->rcu_node_0 ->&rq->__lock ->nl_table_lock ->nl_table_wait.lock ->clock-AF_NETLINK ->&nlk->wait ->(netlink_chain).rwsem ->sk_lock-AF_INET6 ->slock-AF_INET6 ->clock-AF_INET6 ->pool_lock ->&table->hash[i].lock ->&net->packet.sklist_lock ->&po->bind_lock ->sk_lock-AF_PACKET ->slock-AF_PACKET ->fanout_mutex ->&x->wait#2 ->clock-AF_PACKET ->pcpu_lock ->cb_lock ->genl_sk_destructing_waitq.lock ->sk_lock-AF_BLUETOOTH-BTPROTO_HCI ->slock-AF_BLUETOOTH-BTPROTO_HCI ->hci_dev_list_lock ->quarantine_lock ->stock_lock ->clock-AF_ROSE ->sk_lock-AF_ROSE ->slock-AF_ROSE ->sk_lock-AF_VSOCK ->slock-AF_VSOCK ->&____s->seqcount ->clock-AF_NFC ->&cfs_rq->removed.lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rnp->exp_wq[3] ->&rcu_state.expedited_wq ->key ->percpu_counters_lock ->&rnp->exp_wq[1] ->&base->lock ->&net->ipv4.ra_mutex ->&hashinfo->lock ->&meta->lock ->kfence_freelist_lock ->rlock-AF_PACKET ->&rnp->exp_wq[0] ->&rnp->exp_wq[2] ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->&hashinfo->lock#2 ->raw_lock ->clock-AF_IEEE802154 ->clock-AF_RDS ->&rs->rs_recv_lock ->rds_cong_monitor_lock ->rds_cong_lock ->&rs->rs_lock ->&rs->rs_rdma_lock ->&q->lock ->rds_sock_lock ->pfkey_mutex ->clock-AF_KEY ->sk_lock-AF_NFC ->slock-AF_NFC ->rds_ib_devices_lock ->rlock-AF_KEY ->&net->xdp.lock ->&xs->map_list_lock ->&xs->mutex ->clock-AF_XDP ->sk_lock-AF_PPPOX ->slock-AF_PPPOX ->sk_lock-AF_TIPC ->slock-AF_TIPC ->sk_lock-AF_KCM ->slock-AF_KCM ->&mux->lock ->(work_completion)(&kcm->tx_work) ->&mux->rx_lock ->&knet->mutex ->rlock-AF_CAIF ->sk_lock-AF_CAIF ->slock-AF_CAIF ->pfkey_mutex.wait_lock FD: 47 BD: 18 +.+.: &u->lock ->clock-AF_UNIX ->&u->lock/1 ->rlock-AF_UNIX ->&u->peer_wait ->&sk->sk_peer_lock 
->&ei->socket.wq.wait FD: 1 BD: 19 +...: clock-AF_UNIX FD: 27 BD: 19 +.+.: &u->peer_wait ->&p->pi_lock FD: 1 BD: 19 +.+.: rlock-AF_UNIX FD: 224 BD: 1 .+.+: sb_writers#3 ->mount_lock ->tk_core.seq.seqcount ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#22 ->&wb->list_lock ->&wb->work_lock ->&type->i_mutex_dir_key#3 ->&type->i_mutex_dir_key#3/1 ->&base->lock ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->bit_wait_table + i ->&rq->__lock ->remove_cache_srcu ->&zone->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&sb->s_type->i_mutex_key#8 ->tomoyo_ss ->&s->s_inode_list_lock ->sb_internal ->inode_hash_lock ->&fsnotify_mark_srcu ->&n->list_lock ->&____s->seqcount#2 ->rcu_node_0 ->&sem->wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->quarantine_lock ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->fs_reclaim ->&mm->mmap_lock ->&p->alloc_lock ->&f->f_lock ->lock#5 ->&lruvec->lru_lock FD: 211 BD: 2 .+.+: sb_writers#4 ->mount_lock ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#23 ->&wb->list_lock ->&sb->s_type->i_mutex_key#9 ->sysctl_lock ->&dentry->d_lock ->tomoyo_ss ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&obj_hash[i].lock ->oom_adj_mutex ->&p->pi_lock ->&c->lock ->&____s->seqcount#10 ->&(&net->ipv4.ping_group_range.lock)->lock ->&rq->__lock ->remove_cache_srcu ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->oom_adj_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount FD: 1 BD: 5 +.+.: &pid->lock FD: 198 BD: 1 ++++: &type->s_umount_key#30 ->&lru->node[i].lock ->&dentry->d_lock ->&sb->s_type->i_lock_key#22 ->&sbi->s_writepages_rwsem ->&sem->waiters ->&rsp->gp_wait ->&journal->j_state_lock ->&p->alloc_lock ->(work_completion)(&sbi->s_sb_upd_work) ->key#3 ->key#4 ->&sbi->s_error_lock ->mmu_notifier_invalidate_range_start ->batched_entropy_u8.lock ->kfence_freelist_lock ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->bit_wait_table + i ->&rq->__lock ->ext4_li_mtx ->(console_sem).lock ->mount_lock ->&xa->xa_lock#9 ->pool_lock#2 ->&eli->li_list_mtx ->&wb->list_lock ->&s->s_inode_list_lock ->&ei->i_es_lock ->inode_hash_lock ->&fsnotify_mark_srcu FD: 135 BD: 4 ++++: &sbi->s_writepages_rwsem ->&rsp->gp_wait ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->lock#4 ->lock#5 ->&journal->j_state_lock ->jbd2_handle ->tk_core.seq.seqcount ->&xa->xa_lock#9 ->&base->lock ->&____s->seqcount#2 ->&rq_wait->wait ->&__ctx->lock ->&n->list_lock ->rcu_node_0 ->&cfs_rq->removed.lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock FD: 1 BD: 2 ....: &sem->waiters FD: 133 BD: 3 +.+.: (work_completion)(&sbi->s_sb_upd_work) ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock FD: 1 BD: 115 ....: key#3 FD: 1 BD: 114 ....: key#4 FD: 1 BD: 114 +.+.: &sbi->s_error_lock FD: 4 BD: 4 +.+.: &eli->li_list_mtx ->&obj_hash[i].lock ->pool_lock#2 FD: 132 BD: 113 ++++: jbd2_handle ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&ret->b_state_lock ->&journal->j_revoke_lock ->&ei->i_raw_lock ->&journal->j_wait_updates ->&mapping->i_private_lock ->&meta_group_info[i]->alloc_sem ->tk_core.seq.seqcount 
->inode_hash_lock ->batched_entropy_u32.lock ->&ei->xattr_sem ->&obj_hash[i].lock ->&ei->i_es_lock ->&dentry->d_lock ->smack_known_lock ->&sb->s_type->i_lock_key#22 ->&rq->__lock ->&journal->j_state_lock ->bit_wait_table + i ->lock#4 ->lock#5 ->&ei->i_data_sem ->&xa->xa_lock#9 ->&sbi->s_orphan_lock ->&journal->j_list_lock ->key#3 ->key#4 ->&sbi->s_error_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->&____s->seqcount#2 ->&rq_wait->wait ->&__ctx->lock ->&n->list_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&cfs_rq->removed.lock ->&meta->lock ->&bgl->locks[i].lock ->smack_known_lock.wait_lock ->&p->pi_lock ->&folio_wait_table[i] ->&lock->wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->quarantine_lock ->&base->lock ->&ei->i_prealloc_lock ->key#27 ->&sem->wait_lock ->&lruvec->lru_lock FD: 65 BD: 118 +.+.: &ret->b_state_lock ->&journal->j_list_lock ->&obj_hash[i].lock ->bit_wait_table + i FD: 64 BD: 121 +.+.: &journal->j_list_lock ->&sb->s_type->i_lock_key#3 ->&wb->list_lock ->&wb->work_lock ->key#13 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock FD: 1 BD: 117 +.+.: &journal->j_revoke_lock FD: 1 BD: 117 +.+.: &ei->i_raw_lock FD: 27 BD: 116 ....: &journal->j_wait_updates ->&p->pi_lock FD: 31 BD: 297 -...: &wb->work_lock ->&obj_hash[i].lock ->&base->lock FD: 54 BD: 114 ++++: &meta_group_info[i]->alloc_sem ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->&____s->seqcount ->&c->lock ->&x->wait#26 ->&__ctx->lock ->rcu_node_0 ->&rq->__lock ->(&timer.timer) ->&fq->mq_flush_lock ->quarantine_lock ->&bgl->locks[i].lock ->&cfs_rq->removed.lock FD: 137 BD: 3 .+.+: sb_internal ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->remove_cache_srcu ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&base->lock ->pool_lock FD: 2 BD: 118 ++++: &ei->i_prealloc_lock ->&pa->pa_lock#2 FD: 29 BD: 1 .+.+: file_rwsem ->&ctx->flc_lock ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 2 BD: 2 +.+.: &ctx->flc_lock ->&fll->lock FD: 1 BD: 3 +.+.: &fll->lock FD: 176 BD: 2 +.+.: &type->i_mutex_dir_key#3/1 ->rename_lock.seqcount ->&dentry->d_lock ->fs_reclaim ->&ei->i_es_lock ->&ei->i_data_sem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->pool_lock#2 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->inode_hash_lock ->&c->lock ->&obj_hash[i].lock ->&journal->j_state_lock ->&sb->s_type->i_lock_key#22 ->&ei->xattr_sem ->tomoyo_ss ->&s->s_inode_list_lock ->jbd2_handle ->&sb->s_type->i_mutex_key#8 ->&xa->xa_lock#5 ->stock_lock ->&fsnotify_mark_srcu ->&type->i_mutex_dir_key#3 ->&wb->list_lock ->sb_internal ->&____s->seqcount#2 ->rcu_node_0 ->remove_cache_srcu ->&n->list_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->quarantine_lock ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->&meta->lock ->&base->lock ->&sem->wait_lock ->&p->pi_lock FD: 93 BD: 1 +.+.: &type->s_umount_key#31/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start 
->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#11 ->&dentry->d_lock FD: 38 BD: 3 +.+.: &sb->s_type->i_lock_key#25 ->&dentry->d_lock FD: 78 BD: 2 +.+.: &sb->s_type->i_mutex_key#11 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock ->tk_core.seq.seqcount FD: 78 BD: 1 +.+.: &type->s_umount_key#32 ->sb_lock ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&lru->node[i].lock ->&obj_hash[i].lock FD: 40 BD: 1 +.+.: &type->s_umount_key#33 ->sb_lock ->&dentry->d_lock FD: 40 BD: 1 +.+.: &type->s_umount_key#34 ->sb_lock ->&dentry->d_lock FD: 92 BD: 1 +.+.: &type->s_umount_key#35/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#26 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 4 +.+.: &sb->s_type->i_lock_key#26 ->&dentry->d_lock FD: 40 BD: 1 +.+.: &type->s_umount_key#36 ->sb_lock ->&dentry->d_lock FD: 1 BD: 1 +.+.: redirect_lock FD: 281 BD: 1 +.+.: &tty->atomic_write_lock ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&tty->termios_rwsem ->&tty->files_lock FD: 37 BD: 8 +.+.: &ldata->output_lock ->&port_lock_key ->&rq->__lock FD: 93 BD: 1 +.+.: &type->s_umount_key#37/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#27 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->fuse_mutex ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#27 ->&dentry->d_lock FD: 1 BD: 2 +.+.: fuse_mutex FD: 98 BD: 1 +.+.: &type->s_umount_key#38/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->&c->lock ->&____s->seqcount ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#28 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->pstore_sb_lock ->&sb->s_type->i_mutex_key#12 ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#28 ->&dentry->d_lock FD: 74 BD: 4 +.+.: &sb->s_type->i_mutex_key#12 ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&psinfo->read_mutex ->&obj_hash[i].lock FD: 73 BD: 5 +.+.: &psinfo->read_mutex ->(efivars_lock).lock ->fs_reclaim ->pool_lock#2 ->(efi_runtime_lock).lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#12 FD: 96 BD: 1 +.+.: &type->s_umount_key#39/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#29 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->bpf_preload_lock ->&dentry->d_lock FD: 38 BD: 2 +.+.: &sb->s_type->i_lock_key#29 ->&dentry->d_lock FD: 73 BD: 2 +.+.: bpf_preload_lock ->(kmod_concurrent_max).lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#16 ->&rq->__lock ->key ->pcpu_lock ->percpu_counters_lock ->running_helpers_waitq.lock FD: 27 BD: 1 ++++: uts_sem ->hostname_poll.wait.lock ->&rq->__lock FD: 200 BD: 3 ++++: &type->i_mutex_dir_key#5 ->fs_reclaim ->&dentry->d_lock ->rename_lock.seqcount ->tomoyo_ss ->&sbinfo->stat_lock ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&obj_hash[i].lock ->&simple_offset_xa_lock ->smack_known_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->rcu_node_0 ->&sem->wait_lock ->&rcu_state.gp_wq ->key ->pcpu_lock ->percpu_counters_lock 
->&p->pi_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&mm->mmap_lock ->vmap_area_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->smack_known_lock.wait_lock ->&rcu_state.expedited_wq ->&base->lock ->stock_lock ->&meta->lock FD: 213 BD: 2 .+.+: sb_writers#5 ->mount_lock ->&type->i_mutex_dir_key#5 ->&type->i_mutex_dir_key#5/1 ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key ->&wb->list_lock ->&sb->s_type->i_mutex_key#13 ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->&s->s_inode_list_lock ->&info->lock ->&obj_hash[i].lock ->pool_lock#2 ->&sbinfo->stat_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->tomoyo_ss ->&xattrs->lock ->fs_reclaim ->lock#4 ->lock#5 ->&lruvec->lru_lock ->&dentry->d_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->&meta->lock ->kfence_freelist_lock ->&base->lock ->quarantine_lock FD: 28 BD: 1 +.-.: (&cb->timer) ->&obj_hash[i].lock ->&base->lock ->tk_core.seq.seqcount ->&rq_wait->wait FD: 116 BD: 3 +.+.: &type->i_mutex_dir_key#5/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->tomoyo_ss ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&c->lock ->&____s->seqcount ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&obj_hash[i].lock ->&simple_offset_xa_lock ->smack_known_lock ->&u->bindlock ->pool_lock#2 ->&sb->s_type->i_mutex_key#13/4 ->&sem->wait_lock ->&rq->__lock ->&sb->s_type->i_mutex_key#13 ->&fsnotify_mark_srcu ->lock#4 ->lock#5 ->&lruvec->lru_lock ->&info->lock ->&xa->xa_lock#9 ->&n->list_lock ->rcu_node_0 ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->pool_lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq ->remove_cache_srcu ->&p->pi_lock ->smack_known_lock.wait_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->key#9 ->&meta->lock ->&base->lock FD: 7 BD: 11 +.+.: &f->f_lock ->fasync_lock FD: 1 BD: 2 ....: hostname_poll.wait.lock FD: 776 BD: 1 +.+.: &f->f_pos_lock ->&type->i_mutex_dir_key#3 ->&mm->mmap_lock ->&type->i_mutex_dir_key#4 ->sb_writers#5 ->&type->i_mutex_dir_key#5 ->&p->lock ->sysctl_lock ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&obj_hash[i].lock ->&rq->__lock ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock ->sb_writers#12 ->&lock->wait_lock ->sb_writers#4 ->stock_lock FD: 202 BD: 101 +.+.: &mm->mmap_lock/1 ->fs_reclaim ->pool_lock#2 ->&c->lock ->&vma->vm_lock->lock ->&mapping->i_mmap_rwsem ->&anon_vma->rwsem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&mm->page_table_lock ->ptlock_ptr(ptdesc)#2 ->&rq->__lock ->remove_cache_srcu ->&n->list_lock ->&sem->wait_lock ->&p->pi_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->stock_lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount#2 ->key#22 ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->pool_lock FD: 202 BD: 4 +.+.: &sb->s_type->i_mutex_key#13 ->&xattrs->lock ->tk_core.seq.seqcount ->&mm->mmap_lock ->fs_reclaim ->&____s->seqcount ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->&wb->list_lock ->key#9 ->&rq->__lock ->&sb->s_type->i_mutex_key#13/4 ->&simple_offset_xa_lock ->&dentry->d_lock ->&mapping->i_mmap_rwsem ->lock#5 ->&lruvec->lru_lock ->&obj_hash[i].lock ->tomoyo_ss ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->rcu_node_0 ->&cfs_rq->removed.lock 
->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq FD: 79 BD: 6 +.+.: &u->bindlock ->&net->unx.table.locks[i] ->&net->unx.table.locks[i]/1 ->&bsd_socket_locks[i] ->fs_reclaim ->pool_lock#2 ->batched_entropy_u32.lock FD: 38 BD: 9 +.+.: &net->unx.table.locks[i]/1 ->&dentry->d_lock FD: 1 BD: 7 +.+.: &bsd_socket_locks[i] FD: 220 BD: 1 +.+.: &u->iolock ->rlock-AF_UNIX ->&mm->mmap_lock ->&obj_hash[i].lock ->pool_lock#2 ->&u->peer_wait ->&rq->__lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->&u->lock ->&dir->lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq FD: 29 BD: 265 ..-.: &ei->socket.wq.wait ->&p->pi_lock ->&ep->lock FD: 1 BD: 2 ....: key#5 FD: 1 BD: 116 ....: key#6 FD: 1 BD: 116 ....: key#7 FD: 1 BD: 116 ....: key#8 FD: 40 BD: 19 +.+.: &u->lock/1 ->&sk->sk_peer_lock ->&dentry->d_lock ->&sk->sk_peer_lock/1 FD: 213 BD: 1 +.+.: &pipe->mutex/1 ->&pipe->rd_wait ->&pipe->wr_wait ->fs_reclaim ->&____s->seqcount ->&mm->mmap_lock ->&rq->__lock ->&lock->wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->rcu_node_0 ->stock_lock ->pool_lock#2 ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->&base->lock FD: 29 BD: 4 ....: &pipe->rd_wait ->&p->pi_lock ->&ep->lock FD: 29 BD: 4 ....: &pipe->wr_wait ->&p->pi_lock ->&ep->lock FD: 1 BD: 5 ....: key#9 FD: 48 BD: 1 .+.+: sb_writers#6 ->tk_core.seq.seqcount ->mount_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 14 BD: 5 +.-.: (&net->can.stattimer) ->&obj_hash[i].lock ->&base->lock FD: 224 BD: 1 +.+.: sk_lock-AF_NETLINK ->slock-AF_NETLINK ->&mm->mmap_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->&c->lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->batched_entropy_u32.lock ->vmap_purge_lock ->&fp->aux->used_maps_mutex ->rcu_node_0 ->&rq->__lock ->&cfs_rq->removed.lock ->stock_lock ->&f->f_lock FD: 1 BD: 2 +...: slock-AF_NETLINK FD: 2 BD: 21 +.+.: &sk->sk_peer_lock ->&sk->sk_peer_lock/1 FD: 1 BD: 1 ....: &rs->lock#2 FD: 80 BD: 3 +.+.: oom_adj_mutex ->&p->alloc_lock ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->oom_adj_mutex.wait_lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq FD: 115 BD: 1 +.+.: &group->mark_mutex ->&fsnotify_mark_srcu ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->lock ->ucounts_lock ->&mark->lock ->&conn->lock ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_lock_key ->&rq->__lock ->&____s->seqcount#2 ->&lock->wait_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock FD: 13 BD: 198 +.+.: &group->inotify_data.idr_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 3 BD: 2 +.+.: &mark->lock ->&fsnotify_mark_srcu ->&conn->lock FD: 1 BD: 7 +.+.: &conn->lock FD: 222 BD: 2 +.+.: &ep->mtx ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&f->f_lock ->&ei->socket.wq.wait ->&ep->lock ->&group->notification_waitq ->&group->notification_lock ->&sighand->signalfd_wqh ->&sighand->siglock ->&mm->mmap_lock ->&rq->__lock ->&pipe->rd_wait ->key#10 ->&obj_hash[i].lock ->&lock->wait_lock ->sysctl_lock ->&pipe->wr_wait ->rcu_node_0 ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->quarantine_lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock 
->percpu_counters_lock ->remove_cache_srcu FD: 223 BD: 1 +.+.: epnested_mutex ->&ep->mtx FD: 28 BD: 308 ...-: &ep->lock ->&ep->wq FD: 29 BD: 7 ....: &group->notification_waitq ->&p->pi_lock ->&ep->lock FD: 1 BD: 7 +.+.: &group->notification_lock FD: 29 BD: 156 ....: &sighand->signalfd_wqh ->&ep->lock ->&p->pi_lock FD: 750 BD: 1 .+.+: sb_writers#7 ->mount_lock ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#24 ->&wb->list_lock ->&type->i_mutex_dir_key#4 ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rq->__lock ->&root->kernfs_iattr_rwsem ->&dentry->d_lock ->tomoyo_ss ->iattr_mutex ->&sb->s_type->i_mutex_key#14 ->&xattrs->lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->pool_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 27 BD: 116 -...: &x->wait#26 ->&p->pi_lock FD: 1 BD: 122 +.+.: &__ctx->lock FD: 37 BD: 1 +.+.: (wq_completion)kblockd ->(work_completion)(&(&hctx->run_work)->work) ->(work_completion)(&(&q->requeue_work)->work) ->(work_completion)(&q->timeout_work) FD: 34 BD: 115 -.-.: &fq->mq_flush_lock ->tk_core.seq.seqcount ->&q->requeue_lock ->&obj_hash[i].lock ->&x->wait#26 ->bit_wait_table + i FD: 1 BD: 119 -.-.: &q->requeue_lock FD: 31 BD: 1 ..-.: &(&tbl->managed_work)->timer FD: 31 BD: 1 ..-.: &(&krcp->monitor_work)->timer FD: 33 BD: 2 +.+.: (work_completion)(&(&krcp->monitor_work)->work) ->krc.lock ->&obj_hash[i].lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 41 BD: 11 +.+.: swap_lock ->&p->lock#2 FD: 82 BD: 1 .+.+: kn->active ->fs_reclaim ->pool_lock#2 ->&kernfs_locks->open_file_mutex[count] ->&k->list_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->remove_cache_srcu FD: 67 BD: 60 +.+.: &kernfs_locks->open_file_mutex[count] ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->&rq->__lock ->remove_cache_srcu ->&____s->seqcount#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&lock->wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->quarantine_lock FD: 749 BD: 6 +.+.: &of->mutex ->pool_lock#2 ->&obj_hash[i].lock ->&rq->__lock ->cgroup_mutex ->&p->pi_lock ->cgroup_mutex.wait_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&root->deactivate_waitq ->&rcu_state.expedited_wq FD: 1 BD: 257 ..-.: rlock-AF_NETLINK FD: 27 BD: 309 ..-.: &ep->wq ->&p->pi_lock FD: 81 BD: 1 .+.+: kn->active#2 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->quarantine_lock ->remove_cache_srcu ->&rq->__lock FD: 1 BD: 6 ....: &nlk->wait FD: 33 BD: 2 +.+.: (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) ->krc.lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 19 BD: 1 +.-.: (&vblank->disable_timer) ->&dev->vbl_lock FD: 31 BD: 1 +.-.: (&q->timeout) FD: 27 BD: 2 +.+.: (work_completion)(&q->timeout_work) FD: 82 BD: 1 .+.+: kn->active#3 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->quarantine_lock ->remove_cache_srcu FD: 148 BD: 5 ++++: kn->active#4 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&____s->seqcount 
->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&n->list_lock ->&device->physical_node_lock ->udc_lock ->fw_lock ->remove_cache_srcu ->quarantine_lock ->&rfkill->lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&base->lock ->uevent_sock_mutex.wait_lock ->&p->pi_lock ->rcu_node_0 ->&lock->wait_lock ->&meta->lock ->&root->deactivate_waitq ->&rcu_state.expedited_wq ->udc_lock.wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 22 +.+.: &sk->sk_peer_lock/1 FD: 31 BD: 1 ..-.: &(&ovs_net->masks_rebalance)->timer FD: 264 BD: 6 +.+.: (work_completion)(&(&ovs_net->masks_rebalance)->work) ->ovs_mutex ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 72 BD: 1 .+.+: kn->active#5 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->param_lock ->pool_lock#2 ->&on->poll ->&c->lock FD: 72 BD: 2 +.+.: &sb->s_type->i_mutex_key#14 ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 54 +.+.: disk_events_mutex FD: 79 BD: 5 +.+.: &sb->s_type->i_mutex_key#13/4 ->&dentry->d_lock ->&simple_offset_xa_lock ->fs_reclaim ->tk_core.seq.seqcount ->rename_lock ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 31 BD: 404 +.+.: &dentry->d_lock/2 ->&dentry->d_lock/3 FD: 30 BD: 405 +.+.: &dentry->d_lock/3 ->&____s->seqcount#6 ->&wq FD: 1 BD: 407 +.+.: &____s->seqcount#6/1 FD: 68 BD: 1 .+.+: kn->active#6 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock ->remove_cache_srcu FD: 66 BD: 1 .+.+: kn->active#7 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&____s->seqcount ->remove_cache_srcu FD: 68 BD: 1 .+.+: kn->active#8 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock FD: 73 BD: 3 ++++: kn->active#9 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&lock->wait_lock ->&rq->__lock ->&p->pi_lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu ->&root->deactivate_waitq FD: 66 BD: 1 .+.+: kn->active#10 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->remove_cache_srcu ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 66 BD: 3 ++++: kn->active#11 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->remove_cache_srcu ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount FD: 68 BD: 1 .+.+: kn->active#12 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount FD: 68 BD: 1 .+.+: kn->active#13 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#14 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#15 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 68 BD: 1 .+.+: kn->active#16 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 68 BD: 1 .+.+: kn->active#17 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#18 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 68 BD: 1 .+.+: kn->active#19 ->fs_reclaim 
->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&____s->seqcount FD: 68 BD: 1 .+.+: kn->active#20 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->&____s->seqcount#2 FD: 70 BD: 1 .+.+: kn->active#21 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&n->list_lock ->&____s->seqcount#2 FD: 68 BD: 1 .+.+: kn->active#22 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu FD: 70 BD: 1 .+.+: kn->active#23 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&rq->__lock ->&n->list_lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount FD: 70 BD: 1 .+.+: kn->active#24 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock FD: 68 BD: 1 .+.+: kn->active#25 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock FD: 68 BD: 1 .+.+: kn->active#26 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#27 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#28 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 1 +.+.: &sb->s_type->i_mutex_key#15 FD: 53 BD: 1 .+.+: mapping.invalidate_lock#2 ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->pool_lock#2 ->tk_core.seq.seqcount ->&c->lock ->&n->list_lock FD: 68 BD: 1 .+.+: kn->active#29 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->&obj_hash[i].lock FD: 70 BD: 1 .+.+: kn->active#30 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock FD: 76 BD: 1 .+.+: kn->active#31 ->fs_reclaim ->&c->lock ->&n->list_lock ->&kernfs_locks->open_file_mutex[count] ->&dev->power.lock ->pci_lock FD: 68 BD: 1 .+.+: kn->active#32 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#33 ->&rq->__lock ->fs_reclaim ->&c->lock ->&n->list_lock ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 1 +.+.: &evdev->client_lock FD: 29 BD: 1 +.+.: &evdev->mutex ->&dev->mutex#2 FD: 68 BD: 1 .+.+: kn->active#34 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 72 BD: 1 +.-.: slock-AF_INET/1 ->tk_core.seq.seqcount ->pool_lock#2 ->&____s->seqcount ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&hashinfo->ehash_locks[i] ->&tcp_hashinfo.bhash[i].lock ->&zone->lock ->&n->list_lock ->&meta->lock ->kfence_freelist_lock ->batched_entropy_u8.lock ->quarantine_lock ->&____s->seqcount#2 ->init_task.mems_allowed_seq.seqcount FD: 120 BD: 52 ++++: devnet_rename_sem ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->&root->kernfs_rwsem ->&sem->wait_lock ->&p->pi_lock ->kernfs_rename_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->quarantine_lock ->&n->list_lock ->&cfs_rq->removed.lock ->remove_cache_srcu ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 243 ....: kernfs_rename_lock FD: 26 BD: 57 +.+.: &nft_net->commit_mutex ->&rq->__lock FD: 1 BD: 56 ....: target_list_lock FD: 265 BD: 2 +.+.: sk_lock-AF_INET ->slock-AF_INET#2 ->&table->hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock 
->&icsk->icsk_accept_queue.rskq_lock ->clock-AF_INET ->&obj_hash[i].lock ->&base->lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&mm->mmap_lock ->tk_core.seq.seqcount ->&sd->defer_lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->once_mutex ->&rq->__lock ->batched_entropy_u32.lock ->batched_entropy_u16.lock ->rcu_node_0 ->&____s->seqcount#2 ->quarantine_lock ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->&meta->lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&f->f_lock FD: 77 BD: 6 +.-.: slock-AF_INET#2 ->&obj_hash[i].lock ->batched_entropy_u16.lock ->&tcp_hashinfo.bhash[i].lock ->&hashinfo->ehash_locks[i] ->tk_core.seq.seqcount ->(&req->rsk_timer) ->&base->lock ->&icsk->icsk_accept_queue.rskq_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&n->list_lock ->&sk->sk_lock.wq ->init_task.mems_allowed_seq.seqcount ->key#24 FD: 1 BD: 82 ++..: clock-AF_INET FD: 68 BD: 1 .+.+: kn->active#35 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 68 BD: 1 .+.+: kn->active#36 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock FD: 68 BD: 1 .+.+: kn->active#37 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 71 BD: 2 +.+.: &dev_instance->mutex ->fs_reclaim ->pool_lock#2 ->vicodec_core:1851:(hdl)->_lock ->&c->lock ->&____s->seqcount ->&vdev->fh_lock ->&n->list_lock ->&m2m_dev->job_spinlock ->&q->done_wq ->&q->mmap_lock ->&obj_hash[i].lock FD: 4 BD: 3 +.+.: vicodec_core:1851:(hdl)->_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 5 ....: &vdev->fh_lock FD: 1 BD: 3 ....: key#10 FD: 76 BD: 1 +.+.: &mdev->req_queue_mutex ->&dev_instance->mutex ->&vdev->fh_lock ->&mdev->graph_mutex ->vicodec_core:1851:(hdl)->_lock ->&obj_hash[i].lock ->pool_lock#2 ->vim2m:1183:(hdl)->_lock ->&dev->dev_mutex ->&dev->mutex#3 FD: 1 BD: 4 ....: &m2m_dev->job_spinlock FD: 1 BD: 6 ....: &q->done_wq FD: 1 BD: 6 +.+.: &q->mmap_lock FD: 1 BD: 1 +.+.: fh->state->lock FD: 71 BD: 2 +.+.: &dev->dev_mutex ->fs_reclaim ->pool_lock#2 ->vim2m:1183:(hdl)->_lock ->&obj_hash[i].lock ->&vdev->fh_lock ->&m2m_dev->job_spinlock ->&q->done_wq ->&q->mmap_lock FD: 4 BD: 3 +.+.: vim2m:1183:(hdl)->_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 68 BD: 1 .+.+: kn->active#38 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] FD: 3 BD: 1 +.+.: &vcapture->lock ->&q->done_wq ->&q->mmap_lock FD: 4 BD: 2 +.+.: &dev->mutex#3 ->&vdev->fh_lock ->&q->done_wq ->&q->mmap_lock FD: 262 BD: 11 +.+.: &lo->lo_mutex ->&rq->__lock ->&lock->wait_lock ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->cpu_hotplug_lock ->wq_pool_mutex ->&bdev->bd_holder_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->&bdev->bd_size_lock ->&sem->wait_lock ->&p->pi_lock ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->mq_freeze_wq ->&n->list_lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 275 BD: 15 +.+.: &nbd->config_lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&bdev->bd_size_lock ->&q->queue_lock ->&ACCESS_PRIVATE(sdp, lock) ->set->srcu ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#3 ->&c->lock ->&____s->seqcount ->&cfs_rq->removed.lock ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->mq_freeze_wq ->fs_reclaim ->&nsock->tx_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&n->list_lock ->remove_cache_srcu ->&base->lock ->uevent_sock_mutex ->&lock->wait_lock 
->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->pool_lock ->&____s->seqcount#2 ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&rcu_state.expedited_wq ->quarantine_lock FD: 31 BD: 8 ....: &ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&obj_hash[i].lock ->&base->lock FD: 2 BD: 10 +.+.: &new->lock ->&mtdblk->cache_mutex FD: 1 BD: 11 +.+.: &mtdblk->cache_mutex FD: 68 BD: 1 .+.+: kn->active#39 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] FD: 211 BD: 1 +.+.: &mtd->master.chrdev_lock ->&rq->__lock ->&mm->mmap_lock FD: 31 BD: 1 ..-.: &(&wb->dwork)->timer FD: 139 BD: 1 +.+.: (wq_completion)writeback ->(work_completion)(&(&wb->dwork)->work) ->(work_completion)(&(&wb->bw_dwork)->work) ->&rq->__lock FD: 137 BD: 2 +.+.: (work_completion)(&(&wb->dwork)->work) ->&wb->work_lock ->&wb->list_lock ->&p->sequence ->key#11 ->&pl->lock ->&rq->__lock FD: 2 BD: 4 +.-.: &p->sequence ->key#14 FD: 1 BD: 297 -...: key#11 FD: 68 BD: 1 .+.+: kn->active#40 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 4 +.+.: destroy_lock FD: 80 BD: 2 +.+.: connector_reaper_work ->destroy_lock ->&ACCESS_PRIVATE(sdp, lock) ->&fsnotify_mark_srcu ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->pool_lock#2 ->quarantine_lock ->pool_lock ->&cfs_rq->removed.lock ->&base->lock ->&meta->lock ->kfence_freelist_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) FD: 31 BD: 1 ..-.: fs/notify/mark.c:89 FD: 79 BD: 2 +.+.: (reaper_work).work ->destroy_lock ->&ACCESS_PRIVATE(sdp, lock) ->&fsnotify_mark_srcu ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->pool_lock#2 ->&base->lock ->&cfs_rq->removed.lock ->pool_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->quarantine_lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) FD: 27 BD: 1 +.-.: (&journal->j_commit_timer) ->&p->pi_lock FD: 79 BD: 1 +.+.: &journal->j_checkpoint_mutex ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&journal->j_state_lock FD: 27 BD: 116 ....: &journal->j_wait_transaction_locked ->&p->pi_lock FD: 1 BD: 123 -.-.: &memcg->move_lock FD: 1 BD: 117 +.+.: &sbi->s_md_lock FD: 1 BD: 1 ....: &journal->j_fc_wait FD: 1 BD: 1 +.+.: &journal->j_history_lock FD: 12 BD: 53 +...: fib_info_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 21 BD: 53 +...: &net->sctp.local_addr_lock ->&net->sctp.addr_wq_lock FD: 20 BD: 55 +.-.: &net->sctp.addr_wq_lock ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->k-slock-AF_INET6/1 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->quarantine_lock FD: 70 BD: 55 +.+.: (work_completion)(&ht->run_work) ->&ht->mutex ->&rq->__lock FD: 69 BD: 56 +.+.: &ht->mutex ->fs_reclaim ->pool_lock#2 ->batched_entropy_u32.lock ->rhashtable_bucket ->&ht->lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->rcu_node_0 ->batched_entropy_u8.lock ->&base->lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 1 BD: 248 ....: rhashtable_bucket/1 FD: 4 BD: 57 +.+.: &ht->lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +...: clock-AF_NETLINK FD: 1 BD: 52 +...: _xmit_LOOPBACK FD: 30 BD: 58 .+.+: netpoll_srcu ->&rq->__lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 FD: 13 BD: 63 +.-.: &in_dev->mc_tomb_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock 
->&____s->seqcount#2 ->&____s->seqcount ->quarantine_lock FD: 19 BD: 59 +.-.: &im->lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&____s->seqcount#2 ->&obj_hash[i].lock ->&n->list_lock ->init_task.mems_allowed_seq.seqcount FD: 1 BD: 58 +.+.: cbs_list_lock FD: 11 BD: 56 +...: &net->ipv6.addrconf_hash_lock ->&obj_hash[i].lock FD: 32 BD: 94 +...: &ifa->lock ->&obj_hash[i].lock ->batched_entropy_u32.lock ->crngs.lock ->&base->lock FD: 42 BD: 95 +...: &tb->tb6_lock ->&net->ipv6.fib6_walker_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->rlock-AF_NETLINK ->&n->list_lock ->rt6_exception_lock ->&data->fib_event_queue_lock ->&____s->seqcount#2 ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock FD: 1 BD: 96 ++..: &net->ipv6.fib6_walker_lock FD: 255 BD: 17 +.+.: sk_lock-AF_INET6 ->slock-AF_INET6 ->&table->hash[i].lock ->batched_entropy_u32.lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&obj_hash[i].lock ->batched_entropy_u16.lock ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock ->fs_reclaim ->&mm->mmap_lock ->once_lock ->&rq->__lock ->clock-AF_INET6 ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 21 BD: 18 +...: slock-AF_INET6 ->&obj_hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->pool_lock#2 ->key#24 FD: 1 BD: 82 ++..: clock-AF_INET6 FD: 1 BD: 1 +.+.: userns_state_mutex FD: 71 BD: 1 +.+.: sk_lock-AF_UNIX ->slock-AF_UNIX ->fs_reclaim ->stock_lock ->&f->f_lock ->&rq->__lock FD: 1 BD: 2 +...: slock-AF_UNIX FD: 1 BD: 8 +.+.: vmap_purge_lock.wait_lock FD: 68 BD: 1 .+.+: kn->active#41 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] FD: 182 BD: 52 ++++: dev_addr_sem ->net_rwsem ->&tn->lock ->&sdata->sec_mtx ->fs_reclaim ->pool_lock#2 ->nl_table_lock ->rlock-AF_NETLINK ->nl_table_wait.lock ->&tbl->lock ->&pn->hash_lock ->&obj_hash[i].lock ->input_pool.lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&n->list_lock ->&br->lock ->quarantine_lock ->remove_cache_srcu ->rcu_node_0 ->_xmit_ETHER ->&hard_iface->bat_iv.ogm_buff_mutex ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->team->team_lock_key#159 ->team->team_lock_key#163 ->team->team_lock_key#165 ->team->team_lock_key#171 ->team->team_lock_key#172 FD: 758 BD: 3 +.+.: nlk_cb_mutex-GENERIC ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->rtnl_mutex ->&rdev->wiphy.mtx ->rlock-AF_NETLINK ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&rq->__lock ->(console_sem).lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->genl_mutex ->&devlink->lock_key#159 ->&devlink->lock_key#163 ->&devlink->lock_key#165 ->&devlink->lock_key#171 ->&devlink->lock_key#172 FD: 19 BD: 56 +...: &rdev->bss_lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->quarantine_lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 21 BD: 1 +.-.: (&net->sctp.addr_wq_timer) ->&net->sctp.addr_wq_lock FD: 1 BD: 53 +.+.: napi_hash_lock FD: 65 BD: 52 ++..: lapb_list_lock ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&lapb->lock ->(&lapb->t1timer) ->(&lapb->t2timer) FD: 714 BD: 2 +.+.: (work_completion)(&aux->work) ->vmap_area_lock 
->&obj_hash[i].lock ->purge_vmap_area_lock ->vmap_purge_lock ->pool_lock#2 ->pcpu_lock ->stock_lock ->&rq->__lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->rcu_node_0 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&rcu_state.expedited_wq ->&base->lock ->&cfs_rq->removed.lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock FD: 4 BD: 52 ++.-: x25_neigh_list_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 52 +...: _xmit_SLIP FD: 15 BD: 1 +.-.: (&eql->timer) ->&eql->queue.lock ->&obj_hash[i].lock ->&base->lock FD: 4 BD: 55 +.-.: &eql->queue.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 68 BD: 99 +.+.: xps_map_mutex ->fs_reclaim ->pool_lock#2 ->jump_label_mutex ->&rq->__lock FD: 1 BD: 56 +.+.: &data->mutex FD: 17 BD: 73 +...: &local->filter_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 31 BD: 74 ..-.: &rdev->wiphy_work_lock FD: 399 BD: 7 +.+.: (work_completion)(&rdev->wiphy_work) ->&rdev->wiphy.mtx ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 86 BD: 53 +.-.: &dev->tx_global_lock ->_xmit_ETHER#2 ->&obj_hash[i].lock ->&base->lock ->_xmit_NETROM ->_xmit_NONE#2 ->_xmit_TUNNEL6#2 ->_xmit_SIT#2 ->_xmit_TUNNEL#2 ->_xmit_IPGRE#2 ->&qdisc_xmit_lock_key ->&qdisc_xmit_lock_key#2 ->&vlan_netdev_xmit_lock_key ->&batadv_netdev_xmit_lock_key ->&qdisc_xmit_lock_key#3 ->&qdisc_xmit_lock_key#4 ->_xmit_LOOPBACK#2 ->_xmit_X25#2 FD: 59 BD: 85 +.-.: _xmit_ETHER#2 ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 1 BD: 59 +.-.: &sch->q.lock FD: 1 BD: 57 ....: class FD: 1 BD: 57 ....: (&tbl->proxy_timer) FD: 1 BD: 52 +...: _xmit_VOID FD: 1 BD: 52 +...: _xmit_X25 FD: 5 BD: 55 +...: &lapbeth->up_lock ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock FD: 62 BD: 54 +.-.: &lapb->lock ->pool_lock#2 ->&obj_hash[i].lock ->&____s->seqcount ->&base->lock ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->&n->list_lock ->&list->lock#20 FD: 1 BD: 136 +.-.: &ul->lock FD: 77 BD: 52 +...: dev->qdisc_tx_busylock ?: &qdisc_tx_busylock ->_xmit_ETHER#2 ->&obj_hash[i].lock ->pool_lock#2 ->&r->producer_lock#3 ->&sch->q.lock ->&base->lock ->_xmit_SLIP#2 ->_xmit_NETROM ->quarantine_lock ->&meta->lock ->kfence_freelist_lock FD: 6 BD: 12 +.+.: fasync_lock ->&new->fa_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 156 ....: &tty->ctrl.lock FD: 1 BD: 1 +.+.: &buf->lock FD: 1 BD: 8 ....: &tty->flow.lock FD: 2 BD: 115 +.+.: &(ei->i_block_reservation_lock) ->key#15 FD: 33 BD: 2 +.+.: (work_completion)(&work->work) ->devices_rwsem ->&obj_hash[i].lock ->&____s->seqcount ->pool_lock#2 ->quarantine_lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&meta->lock ->kfence_freelist_lock FD: 712 BD: 2 +.+.: (work_completion)(&(&ifa->dad_work)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 96 +.-.: rt6_exception_lock FD: 183 BD: 5 +.+.: &ldata->atomic_read_lock ->&tty->termios_rwsem ->(work_completion)(&buf->work) ->&rq->__lock FD: 32 BD: 1 +.-.: &tx->clean_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 29 BD: 2 +.+.: &net->packet.sklist_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 235 BD: 2 +.+.: sk_lock-AF_PACKET ->slock-AF_PACKET ->&po->bind_lock ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock 
->&mm->mmap_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->vmap_purge_lock ->&fp->aux->used_maps_mutex ->&c->lock ->pool_lock ->&n->list_lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->init_mm.page_table_lock ->&rnp->exp_wq[0] ->&rnp->exp_wq[2] ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rnp->exp_wq[3] ->rcu_node_0 ->remove_cache_srcu ->&rnp->exp_wq[1] ->&rcu_state.expedited_wq FD: 28 BD: 3 +...: slock-AF_PACKET ->&sk->sk_lock.wq FD: 34 BD: 53 +.+.: &po->bind_lock ->ptype_lock ->pool_lock#2 ->&dir->lock#2 FD: 1 BD: 146 +.-.: rlock-AF_PACKET FD: 1 BD: 1 +...: wlock-AF_PACKET FD: 31 BD: 1 ..-.: &(&idev->mc_ifc_work)->timer FD: 155 BD: 1 +.+.: (wq_completion)mld ->(work_completion)(&(&idev->mc_ifc_work)->work) ->(work_completion)(&(&idev->mc_dad_work)->work) ->&rq->__lock FD: 153 BD: 2 +.+.: (work_completion)(&(&idev->mc_ifc_work)->work) ->&idev->mc_lock ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 17 BD: 61 +.-.: &ul->lock#2 ->pool_lock#2 ->&dir->lock#2 ->&c->lock ->&n->list_lock FD: 16 BD: 127 ++--: &n->lock ->&____s->seqcount#8 ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&(&n->ha_lock)->lock FD: 1 BD: 129 +.--: &____s->seqcount#8 FD: 43 BD: 2 +.+.: (work_completion)(&w->work)#2 ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->nf_conntrack_mutex ->&rq->__lock ->nf_conntrack_mutex.wait_lock ->&p->pi_lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock FD: 1 BD: 134 ...-: &____s->seqcount#9 FD: 31 BD: 1 ..-.: &(&idev->mc_dad_work)->timer FD: 153 BD: 2 +.+.: (work_completion)(&(&idev->mc_dad_work)->work) ->&idev->mc_lock ->&rq->__lock FD: 31 BD: 1 ..-.: net/core/link_watch.c:31 FD: 1 BD: 126 +.-.: lock#8 FD: 1 BD: 126 ..-.: id_table_lock FD: 26 BD: 2 +.+.: fanout_mutex ->&rq->__lock FD: 1 BD: 2 +...: clock-AF_PACKET FD: 31 BD: 1 ..-.: &(&ifa->dad_work)->timer FD: 31 BD: 1 ..-.: drivers/base/dd.c:321 FD: 37 BD: 2 +.+.: (deferred_probe_timeout_work).work ->device_links_lock ->deferred_probe_mutex ->deferred_probe_work ->&x->wait#10 ->&rq->__lock ->&obj_hash[i].lock FD: 74 BD: 1 .+.+: &type->s_umount_key#40 ->&sb->s_type->i_lock_key#3 ->mmu_notifier_invalidate_range_start ->batched_entropy_u8.lock ->kfence_freelist_lock ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->lock#4 ->lock#5 ->&wb->list_lock ->&c->lock ->&rq->__lock ->&____s->seqcount ->&____s->seqcount#2 ->&n->list_lock ->lock#11 FD: 1 BD: 297 -...: &s->s_inode_wblist_lock FD: 1 BD: 298 -...: key#12 FD: 83 BD: 115 +.+.: &lg->lg_mutex ->&ei->i_prealloc_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->lock#4 ->&mapping->i_private_lock ->&ret->b_state_lock ->&journal->j_revoke_lock ->&pa->pa_lock ->&lg->lg_prealloc_lock ->bit_wait_table + i ->&__ctx->lock ->&obj_hash[i].lock ->&rq->__lock FD: 1 BD: 116 +.+.: &pa->pa_lock FD: 1 BD: 116 +.+.: &lg->lg_prealloc_lock FD: 31 BD: 3 -.-.: &ei->i_completed_io_lock FD: 137 BD: 1 +.+.: (wq_completion)ext4-rsv-conversion ->(work_completion)(&ei->i_rsv_conversion_work) FD: 136 BD: 2 +.+.: (work_completion)(&ei->i_rsv_conversion_work) ->&ei->i_completed_io_lock ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->pool_lock#2 ->&ext4__ioend_wq[i] ->&ret->b_uptodate_lock ->&folio_wait_table[i] ->rcu_node_0 ->&rq->__lock ->&lruvec->lru_lock ->&cfs_rq->removed.lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&rcu_state.expedited_wq FD: 
1 BD: 116 ....: &journal->j_wait_reserved FD: 1 BD: 3 ....: &ext4__ioend_wq[i] FD: 1 BD: 8 +.-.: &ct->lock FD: 31 BD: 1 ..-.: &(&wb->bw_dwork)->timer FD: 61 BD: 2 +.+.: (work_completion)(&(&wb->bw_dwork)->work) ->&wb->list_lock FD: 1 BD: 122 ....: key#13 FD: 87 BD: 1 +.-.: (&dev->watchdog_timer) ->&dev->tx_global_lock FD: 63 BD: 53 +.-.: (&lapb->t1timer) ->&lapb->lock FD: 16 BD: 1 +.-.: (&dom->period_timer) ->key#14 ->&p->sequence ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 299 -.-.: key#14 FD: 2 BD: 297 ....: &pl->lock ->key#12 FD: 33 BD: 116 +.-.: &nf_conntrack_locks[i] ->&nf_conntrack_locks[i]/1 ->batched_entropy_u8.lock ->once_lock ->&c->lock ->pool_lock#2 ->&obj_hash[i].lock FD: 4 BD: 117 +.-.: &nf_conntrack_locks[i]/1 ->batched_entropy_u8.lock FD: 1 BD: 80 +.-.: &hashinfo->ehash_locks[i] FD: 2 BD: 128 +.-.: &(&n->ha_lock)->lock ->&____s->seqcount#8 FD: 1 BD: 7 ..-.: (&req->rsk_timer) FD: 1 BD: 7 +.-.: &icsk->icsk_accept_queue.rskq_lock FD: 1 BD: 3 +.-.: &sd->defer_lock FD: 78 BD: 1 +.-.: (&icsk->icsk_retransmit_timer) ->slock-AF_INET#2 FD: 78 BD: 1 +.-.: (&icsk->icsk_delack_timer) ->slock-AF_INET#2 FD: 2 BD: 6 +.-.: tcp_metrics_lock ->pool_lock#2 FD: 1 BD: 116 ....: key#15 FD: 77 BD: 114 +.+.: &sbi->s_orphan_lock ->&rq->__lock ->mmu_notifier_invalidate_range_start ->&ei->i_raw_lock ->&lock->wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->rcu_node_0 ->&ret->b_state_lock ->&mapping->i_private_lock ->pool_lock#2 ->&rcu_state.expedited_wq ->&____s->seqcount ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 31 BD: 1 ..-.: &(&tbl->gc_work)->timer FD: 47 BD: 2 +.+.: (work_completion)(&(&tbl->gc_work)->work) ->&tbl->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 68 BD: 1 .+.+: kn->active#42 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 1 +.+.: &futex_queues[i].lock FD: 1 BD: 4 ....: &on->poll FD: 1 BD: 3 +.+.: module_mutex FD: 3 BD: 3 +.+.: once_mutex ->crngs.lock FD: 31 BD: 1 ..-.: &(&dm_bufio_cleanup_old_work)->timer FD: 16 BD: 1 +.+.: (wq_completion)dm_bufio_cache ->(work_completion)(&(&dm_bufio_cleanup_old_work)->work) FD: 15 BD: 2 +.+.: (work_completion)(&(&dm_bufio_cleanup_old_work)->work) ->dm_bufio_clients_lock ->&obj_hash[i].lock ->&base->lock FD: 31 BD: 1 ..-.: drivers/regulator/core.c:6335 FD: 4 BD: 2 +.+.: (regulator_init_complete_work).work ->&k->list_lock ->&k->k_lock FD: 113 BD: 1 +.+.: &type->s_umount_key#41/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#30 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock FD: 38 BD: 255 +.+.: &sb->s_type->i_lock_key#30 ->&dentry->d_lock FD: 744 BD: 1 .+.+: sb_writers#8 ->mount_lock ->&type->i_mutex_dir_key#6 ->fs_reclaim ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->&type->i_mutex_dir_key#6/1 ->&c->lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu FD: 87 BD: 2 ++++: &type->i_mutex_dir_key#6 ->tomoyo_ss ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#30 ->&c->lock ->&____s->seqcount ->&____s->seqcount#2 ->pool_lock#2 ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->&rq->__lock FD: 68 BD: 1 ++++: kn->active#43 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 FD: 113 BD: 1 +.+.: &type->s_umount_key#42/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock 
->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#31 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock ->&c->lock ->&n->list_lock FD: 38 BD: 255 +.+.: &sb->s_type->i_lock_key#31 ->&dentry->d_lock FD: 97 BD: 1 ++++: &type->s_umount_key#43 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->percpu_ref_switch_lock ->&root->kernfs_supers_rwsem ->rename_lock.seqcount ->&dentry->d_lock ->&sb->s_type->i_lock_key#31 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->inode_hash_lock ->pool_lock#2 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 ->&lru->node[i].lock ->&rq->__lock FD: 1 BD: 1 ..-.: percpu_ref_switch_waitq.lock FD: 722 BD: 2 +.+.: (work_completion)(&cgrp->bpf.release_work) ->cgroup_mutex ->cgroup_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 16 +.+.: cgroup_mutex.wait_lock FD: 727 BD: 1 +.+.: (wq_completion)cgroup_destroy ->(work_completion)(&css->destroy_work) ->(work_completion)(&(&css->destroy_rwork)->work) FD: 720 BD: 2 +.+.: (work_completion)(&css->destroy_work) ->cgroup_mutex ->&obj_hash[i].lock ->pool_lock#2 ->cgroup_mutex.wait_lock ->&p->pi_lock FD: 725 BD: 2 +.+.: (work_completion)(&(&css->destroy_rwork)->work) ->percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 ->&cgrp->pidlist_mutex ->(wq_completion)cgroup_pidlist_destroy ->&wq->mutex ->(work_completion)(&cgrp->release_agent_work) ->cgroup_mutex ->cgroup_rstat_lock ->pcpu_lock ->&root->kernfs_rwsem ->kernfs_idr_lock ->krc.lock ->cgroup_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 3 +.+.: &cgrp->pidlist_mutex FD: 1 BD: 3 +.+.: (wq_completion)cgroup_pidlist_destroy FD: 1 BD: 3 +.+.: (work_completion)(&cgrp->release_agent_work) FD: 744 BD: 1 .+.+: sb_writers#9 ->mount_lock ->&type->i_mutex_dir_key#7 ->fs_reclaim ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->&type->i_mutex_dir_key#7/1 ->&c->lock ->remove_cache_srcu FD: 89 BD: 2 ++++: &type->i_mutex_dir_key#7 ->tomoyo_ss ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#31 ->&c->lock ->pool_lock#2 ->&rq->__lock ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 1 BD: 16 +.+.: &dom->lock FD: 68 BD: 1 .+.+: kn->active#44 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 256 BD: 1 .+.+: kn->active#45 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->cpu_hotplug_lock FD: 213 BD: 1 .+.+: sb_writers#10 ->&mm->mmap_lock ->&attr->mutex ->mount_lock ->&sb->s_type->i_mutex_key#3 ->&dentry->d_lock ->tomoyo_ss ->fs_reclaim ->stock_lock ->&c->lock ->&n->list_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu FD: 211 BD: 2 +.+.: &attr->mutex ->&mm->mmap_lock FD: 40 BD: 1 +.+.: &type->s_umount_key#44 ->sb_lock ->&dentry->d_lock FD: 96 BD: 2 +.+.: &sb->s_type->i_mutex_key#16 ->namespace_sem ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#26 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->init_binfmt_misc.entries_lock FD: 221 BD: 1 .+.+: sb_writers#11 ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&sb->s_type->i_mutex_key#16 FD: 1 BD: 52 +.+.: &wpan_dev->association_lock FD: 1 BD: 54 +...: &pn->hash_lock FD: 43 BD: 1 +...: &net->ipv6.fib6_gc_lock ->&obj_hash[i].lock FD: 1 BD: 52 +...: _xmit_IEEE802154 FD: 1 BD: 3 +.+.: &xa->xa_lock#16 FD: 1 BD: 6 
....: genl_sk_destructing_waitq.lock FD: 1 BD: 3 +...: &rdev->beacon_registrations_lock FD: 1 BD: 57 +...: &rdev->mgmt_registrations_lock FD: 1 BD: 57 +...: &wdev->pmsr_lock FD: 1 BD: 53 +.+.: reg_indoor_lock FD: 169 BD: 2 +.+.: (work_completion)(&w->w) ->nfc_devlist_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->nfc_devlist_mutex.wait_lock ->&p->pi_lock ->quarantine_lock ->rcu_node_0 ->&cfs_rq->removed.lock FD: 26 BD: 5 +.+.: &genl_data->genl_data_mutex ->&rq->__lock FD: 66 BD: 6 +.+.: swap_cgroup_mutex ->fs_reclaim ->&____s->seqcount FD: 1 BD: 6 +.+.: &((cluster_info + ci)->lock)/1 FD: 82 BD: 6 +.+.: swapon_mutex ->fs_reclaim ->pool_lock#2 ->swap_lock ->percpu_ref_switch_lock ->(console_sem).lock FD: 40 BD: 549 +.+.: &p->lock#2 ->swap_avail_lock ->&((cluster_info + ci)->lock)#2 ->batched_entropy_u32.lock ->&ctrl->lock#2 ->&tree->lock ->&xa->xa_lock#23 FD: 1 BD: 549 +.+.: swap_avail_lock FD: 1 BD: 6 ....: proc_poll_wait.lock FD: 257 BD: 1 +.+.: swap_slots_cache_enable_mutex ->cpu_hotplug_lock ->swap_lock FD: 1 BD: 101 +.+.: swap_slots_cache_mutex FD: 1 BD: 154 ....: &newf->resize_wait FD: 13 BD: 128 ..-.: &kcov->lock ->kcov_remote_lock FD: 134 BD: 1 +.+.: pid_caches_mutex ->slab_mutex FD: 40 BD: 1 +.+.: &type->s_umount_key#45 ->sb_lock ->&dentry->d_lock ->&rq->__lock FD: 93 BD: 1 +.+.: &sb->s_type->i_mutex_key#17 ->namespace_sem ->&rq->__lock FD: 1 BD: 26 ++++: hci_sk_list.lock FD: 1 BD: 1 +.+.: (work_completion)(&(&data->open_timeout)->work) FD: 734 BD: 1 +.+.: &data->open_mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&obj_hash[i].lock ->&x->wait#9 ->hci_index_ida.xa_lock ->&c->lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->wq_pool_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->subsys mutex#74 ->&dev->devres_lock ->triggers_list_lock ->leds_list_lock ->rfkill_global_mutex ->&rfkill->lock ->hci_dev_list_lock ->tk_core.seq.seqcount ->hci_sk_list.lock ->(pm_chain_head).rwsem ->&rq->__lock ->&list->lock#7 ->&data->read_wait ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&n->list_lock ->&sem->wait_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->quarantine_lock ->remove_cache_srcu ->uevent_sock_mutex.wait_lock ->rcu_node_0 ->batched_entropy_u8.lock ->kfence_freelist_lock ->pcpu_alloc_mutex.wait_lock FD: 1 BD: 2 ....: hci_index_ida.xa_lock FD: 28 BD: 23 +.+.: subsys mutex#74 ->&k->k_lock ->&rq->__lock FD: 1 BD: 14 ++++: hci_dev_list_lock FD: 1 BD: 54 +...: &qdisc_xmit_lock_key FD: 198 BD: 9 +.+.: (work_completion)(&hdev->power_on) ->&hdev->req_lock ->fs_reclaim ->pool_lock#2 ->tk_core.seq.seqcount ->hci_sk_list.lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 FD: 197 BD: 11 +.+.: &hdev->req_lock ->&obj_hash[i].lock ->pool_lock#2 ->&list->lock#5 ->&list->lock#6 ->&hdev->req_wait_q ->&base->lock ->&rq->__lock ->(&timer.timer) ->&c->lock ->&____s->seqcount ->tk_core.seq.seqcount ->hci_sk_list.lock ->&cfs_rq->removed.lock ->(work_completion)(&(&hdev->interleave_scan)->work) ->hci_dev_list_lock ->(work_completion)(&hdev->tx_work) ->(work_completion)(&hdev->rx_work) ->&wq->mutex ->&hdev->lock ->&list->lock#7 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->pool_lock ->&meta->lock ->quarantine_lock 
->rcu_node_0 ->&rcu_state.expedited_wq ->(wq_completion)hci3#2 ->(wq_completion)hci2#2 ->(wq_completion)hci0#4 FD: 1 BD: 12 ....: &list->lock#5 FD: 1 BD: 24 ....: &list->lock#6 FD: 27 BD: 21 ....: &hdev->req_wait_q ->&p->pi_lock FD: 1 BD: 54 +...: &qdisc_xmit_lock_key#2 FD: 70 BD: 20 +.+.: (work_completion)(&hdev->cmd_work) ->&list->lock#6 ->fs_reclaim ->pool_lock#2 ->tk_core.seq.seqcount ->&list->lock#7 ->&obj_hash[i].lock ->&data->read_wait ->&c->lock ->&____s->seqcount ->&rq->__lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->remove_cache_srcu ->quarantine_lock FD: 1 BD: 23 ....: &list->lock#7 FD: 27 BD: 23 ....: &data->read_wait ->&p->pi_lock FD: 80 BD: 2 +.+.: sk_lock-AF_BLUETOOTH-BTPROTO_HCI ->slock-AF_BLUETOOTH-BTPROTO_HCI ->sock_cookie_ida.xa_lock ->&p->alloc_lock ->pool_lock#2 ->tk_core.seq.seqcount ->hci_sk_list.lock ->&obj_hash[i].lock ->&c->lock ->clock-AF_BLUETOOTH ->&____s->seqcount ->&rq->__lock ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock FD: 1 BD: 3 +...: slock-AF_BLUETOOTH-BTPROTO_HCI FD: 1 BD: 3 ....: sock_cookie_ida.xa_lock FD: 1 BD: 5 ....: netdev_unregistering_wq.lock FD: 2 BD: 1 +.+.: &ctx->cancel_lock ->cancel_lock FD: 39 BD: 8 +.+.: nf_conntrack_mutex ->&nf_conntrack_locks[i] ->&rq->__lock ->&____s->seqcount#7 ->&nf_conntrack_locks[i]/1 ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->nf_conntrack_mutex.wait_lock FD: 180 BD: 20 +.+.: (work_completion)(&hdev->rx_work) ->&list->lock#6 ->lock#6 ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&c->lock ->&n->list_lock ->&hdev->lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->&obj_hash[i].lock ->&hdev->req_wait_q ->&base->lock ->chan_list_lock ->&____s->seqcount#2 ->&meta->lock ->kfence_freelist_lock ->batched_entropy_u8.lock ->quarantine_lock ->remove_cache_srcu ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 173 BD: 21 +.+.: &hdev->lock ->&xa->xa_lock#17 ->fs_reclaim ->&c->lock ->&n->list_lock ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#9 ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&k->k_lock ->subsys mutex#74 ->&list->lock#6 ->&hdev->unregister_lock ->hci_cb_list_lock ->&base->lock ->tk_core.seq.seqcount ->hci_sk_list.lock ->&____s->seqcount ->(work_completion)(&(&conn->disc_work)->work) ->(work_completion)(&(&conn->auto_accept_work)->work) ->(work_completion)(&(&conn->idle_work)->work) ->&x->wait#2 ->&rq->__lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->&meta->lock ->hci_cb_list_lock.wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->&sem->wait_lock ->rcu_node_0 ->rcu_state.exp_mutex.wait_lock ->&rnp->exp_lock ->&rnp->exp_wq[3] ->&cfs_rq->removed.lock ->quarantine_lock ->pool_lock ->&rcu_state.expedited_wq ->uevent_sock_mutex.wait_lock FD: 1 BD: 2 ....: rds_cong_lock FD: 10 BD: 22 ....: &xa->xa_lock#17 ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock FD: 69 BD: 23 +.+.: &hdev->unregister_lock ->fs_reclaim ->pool_lock#2 ->&hdev->cmd_sync_work_lock ->&c->lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu FD: 26 BD: 24 +.+.: &hdev->cmd_sync_work_lock ->&rq->__lock FD: 26 BD: 23 +.+.: &conn->ident_lock ->&rq->__lock FD: 1 BD: 24 ....: 
&list->lock#8 FD: 1 BD: 25 +.+.: &conn->chan_lock FD: 198 BD: 9 +.+.: (work_completion)(&hdev->cmd_sync_work) ->&hdev->cmd_sync_work_lock ->&hdev->req_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 31 BD: 20 +.+.: (work_completion)(&hdev->tx_work) ->&list->lock#8 ->tk_core.seq.seqcount ->&list->lock#7 ->&data->read_wait ->&list->lock#6 ->&rq->__lock FD: 2 BD: 20 +.+.: (work_completion)(&conn->pending_rx_work) ->&list->lock#9 FD: 1 BD: 21 ....: &list->lock#9 FD: 1 BD: 3 +...: clock-AF_BLUETOOTH FD: 1 BD: 27 ....: namespace_sem.wait_lock FD: 1 BD: 199 +.+.: &undo_list->lock FD: 1 BD: 18 +...: slock-AF_TIPC FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#3 FD: 1 BD: 52 +...: &nr_netdev_addr_lock_key FD: 1 BD: 52 +...: listen_lock FD: 2 BD: 6 +.+.: rdma_nets.xa_lock ->pool_lock#2 FD: 1 BD: 4 +.+.: &____s->seqcount#10 FD: 2 BD: 3 +.+.: &(&net->ipv4.ping_group_range.lock)->lock ->&____s->seqcount#10 FD: 2 BD: 52 +.+.: &r->consumer_lock ->&r->producer_lock FD: 1 BD: 62 +...: &r->producer_lock FD: 1 BD: 4 +.+.: &table->hbs[i].lock FD: 2 BD: 100 +.+.: (work_completion)(flush) ->&list->lock#12 FD: 8 BD: 60 +...: &bridge_netdev_addr_lock_key ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 37 BD: 55 +.-.: &br->hash_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&____s->seqcount#2 ->&n->list_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock FD: 26 BD: 52 +.+.: j1939_netdev_lock ->&rq->__lock FD: 8 BD: 60 +...: &dev_addr_list_lock_key#2 ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 9 BD: 52 +...: &bat_priv->tvlv.handler_list_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&n->list_lock FD: 14 BD: 59 +...: &bat_priv->tvlv.container_list_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->quarantine_lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 9 BD: 60 +...: &batadv_netdev_addr_lock_key ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&n->list_lock FD: 8 BD: 61 +...: &bat_priv->softif_vlan_list_lock ->pool_lock#2 ->&c->lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 18 BD: 60 +...: key#16 ->&bat_priv->softif_vlan_list_lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 6 BD: 59 +...: &bat_priv->tt.changes_list_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock FD: 31 BD: 1 ..-.: &(&bat_priv->nc.work)->timer FD: 62 BD: 1 +.+.: (wq_completion)bat_events ->(work_completion)(&(&bat_priv->nc.work)->work) ->(work_completion)(&(&bat_priv->mcast.work)->work) ->(work_completion)(&(&bat_priv->orig_work)->work) ->(work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->(work_completion)(&(&bat_priv->tt.work)->work) ->(work_completion)(&(&bat_priv->dat.work)->work) ->(work_completion)(&(&bat_priv->bla.work)->work) ->&rq->__lock ->(work_completion)(&barr->work) ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 32 BD: 6 +.+.: (work_completion)(&(&bat_priv->nc.work)->work) ->key#17 ->key#18 ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 1 BD: 7 +...: key#17 FD: 1 BD: 7 +...: key#18 FD: 134 BD: 53 +.+.: init_lock ->slab_mutex ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&obj_hash[i].lock ->&base->lock ->crngs.lock FD: 1 BD: 72 +.-.: deferred_lock FD: 712 BD: 2 +.+.: deferred_process_work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock 
->&obj_hash[i].lock ->pool_lock#2 FD: 51 BD: 54 +.-.: &br->lock ->&br->hash_lock ->lweventlist_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&dir->lock#2 ->deferred_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&c->lock ->&____s->seqcount ->nl_table_lock ->nl_table_wait.lock ->&br->multicast_lock ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 6 +.+.: ebt_mutex.wait_lock FD: 38 BD: 60 +.+.: (work_completion)(&(&slave->notify_work)->work) ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->pool_lock#2 ->rtnl_mutex.wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock FD: 1 BD: 53 +.+.: &bond->stats_lock/1 FD: 31 BD: 1 ..-.: &(&slave->notify_work)->timer FD: 1 BD: 5 ....: &usblp->rwait FD: 712 BD: 2 +.+.: (crda_timeout).work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 52 +...: &pmc->lock FD: 40 BD: 55 +.+.: &hard_iface->bat_iv.ogm_buff_mutex ->crngs.lock ->pool_lock#2 ->rcu_node_0 ->&rq->__lock ->batched_entropy_u8.lock ->&bat_priv->forw_bat_list_lock ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->kfence_freelist_lock ->&bat_priv->tt.commit_lock ->&bat_priv->tvlv.container_list_lock ->&____s->seqcount#2 ->&n->list_lock ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq FD: 14 BD: 56 +...: &bat_priv->forw_bat_list_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 52 +...: _xmit_NONE FD: 1 BD: 52 +...: lock#9 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg0#314 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 53 ...-: &____s->seqcount#11 FD: 31 BD: 1 ..-.: &(&bat_priv->mcast.work)->timer FD: 38 BD: 6 +.+.: (work_completion)(&(&bat_priv->mcast.work)->work) ->pool_lock#2 ->&bat_priv->mcast.mla_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->kfence_freelist_lock ->&meta->lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->quarantine_lock FD: 23 BD: 7 +.+.: &bat_priv->mcast.mla_lock ->pool_lock#2 ->key#16 ->&bat_priv->tt.changes_list_lock ->&bat_priv->tvlv.container_list_lock ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&meta->lock ->kfence_freelist_lock ->batched_entropy_u8.lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#20 FD: 1 BD: 64 +.-.: &hsr->list_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 3 +...: &log->instances_lock FD: 1 BD: 52 +...: nr_list_lock FD: 9 BD: 60 +...: &vlan_netdev_addr_lock_key ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&n->list_lock FD: 10 BD: 60 +...: &macvlan_netdev_addr_lock_key ->pool_lock#2 ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 151 ...-: init_task.mems_allowed_seq.seqcount FD: 10 BD: 60 +...: &dev_addr_list_lock_key#3 ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&n->list_lock ->&____s->seqcount#2 FD: 1 BD: 52 ....: &xa->xa_lock#18 FD: 17 BD: 52 +.-.: (&app->join_timer) ->&app->lock ->&list->lock#10 FD: 15 BD: 54 +.-.: &app->lock ->batched_entropy_u32.lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 53 ..-.: &list->lock#10 FD: 17 BD: 60 +...: &dev_addr_list_lock_key#3/1 ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&c->lock ->&n->list_lock FD: 17 BD: 52 +.-.: (&app->join_timer)#2 ->&app->lock#2 ->&list->lock#11 
->batched_entropy_u32.lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 53 +.-.: &app->lock#2 FD: 1 BD: 53 ..-.: &list->lock#11 FD: 31 BD: 1 ..-.: &(&bat_priv->orig_work)->timer FD: 31 BD: 1 ..-.: drivers/net/wireguard/ratelimiter.c:20 FD: 30 BD: 6 +.+.: (work_completion)(&(&bat_priv->orig_work)->work) ->key#19 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->pool_lock#2 ->&rcu_state.expedited_wq FD: 1 BD: 54 +...: key#19 FD: 30 BD: 2 +.+.: (gc_work).work ->tk_core.seq.seqcount ->"ratelimiter_table_lock" ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 3 +.+.: "ratelimiter_table_lock" FD: 7 BD: 52 +.+.: &tap_major->minor_lock ->&c->lock ->pool_lock#2 FD: 3 BD: 52 +.+.: subsys mutex#75 ->&k->k_lock FD: 731 BD: 1 .+.+: kn->active#46 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->nsim_bus_dev_list_lock ->&c->lock ->nsim_bus_dev_list_lock.wait_lock ->&p->pi_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 730 BD: 9 +.+.: nsim_bus_dev_list_lock ->fs_reclaim ->pool_lock#2 ->nsim_bus_dev_ids.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->uevent_sock_mutex.wait_lock ->&p->pi_lock ->device_links_lock ->nsim_bus_dev_list_lock.wait_lock ->&rq->__lock ->deferred_probe_mutex ->subsys mutex#76 ->&n->list_lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount#2 ->quarantine_lock ->&lock->wait_lock ->&sem->wait_lock ->remove_cache_srcu ->&cfs_rq->removed.lock FD: 731 BD: 1 .+.+: kn->active#47 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->nsim_bus_dev_list_lock ->nsim_bus_dev_list_lock.wait_lock ->&p->pi_lock ->&rq->__lock ->&____s->seqcount ->remove_cache_srcu ->&n->list_lock ->&____s->seqcount#2 FD: 1 BD: 10 ....: nsim_bus_dev_ids.xa_lock FD: 1 BD: 173 +.+.: uevent_sock_mutex.wait_lock FD: 9 BD: 18 +.+.: devlinks.xa_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock FD: 52 BD: 1 +.-.: (&p->forward_delay_timer) ->&br->lock FD: 13 BD: 18 +.+.: &xa->xa_lock#19 ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 31 BD: 1 ..-.: &(&nsim_dev->trap_data->trap_report_dw)->timer FD: 33 BD: 2 +.+.: (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 98 +...: &data->fib_event_queue_lock FD: 1 BD: 18 ....: &(&fn_net->fib_chain)->lock FD: 75 BD: 2 +.+.: (work_completion)(&data->fib_event_work) ->&data->fib_event_queue_lock ->&data->fib_lock ->rcu_node_0 ->&rq->__lock ->&rcu_state.expedited_wq FD: 73 BD: 3 +.+.: &data->fib_lock ->fs_reclaim ->pool_lock#2 ->pool_lock ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock ->(&timer.timer) ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->&n->list_lock ->&____s->seqcount ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&meta->lock ->quarantine_lock FD: 1 BD: 10 +.+.: nsim_bus_dev_list_lock.wait_lock FD: 31 BD: 1 ..-.: &(&forw_packet_aggr->delayed_work)->timer FD: 43 BD: 53 +.+.: (work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->&hard_iface->bat_iv.ogm_buff_mutex 
->&bat_priv->forw_bat_list_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->batched_entropy_u8.lock ->quarantine_lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&base->lock ->&rcu_state.expedited_wq FD: 71 BD: 56 +.+.: bpf_devs_lock ->&rq->__lock ->fs_reclaim ->&c->lock ->pool_lock#2 ->rcu_node_0 ->&obj_hash[i].lock ->&n->list_lock ->remove_cache_srcu ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 26 BD: 52 +.+.: (work_completion)(&(&devlink_port->type_warn_dw)->work) ->&rq->__lock FD: 1 BD: 52 +...: &devlink_port->type_lock FD: 1 BD: 52 +.+.: &vn->sock_lock FD: 1 BD: 10 +.+.: subsys mutex#76 FD: 1 BD: 54 +...: _xmit_IPGRE#2 FD: 31 BD: 1 ..-.: &(&hwstats->traffic_dw)->timer FD: 27 BD: 2 +.+.: (work_completion)(&(&hwstats->traffic_dw)->work) ->&hwstats->hwsdev_list_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&rq->__lock ->&cfs_rq->removed.lock FD: 26 BD: 53 +.+.: &hwstats->hwsdev_list_lock ->&rq->__lock FD: 17 BD: 20 +.+.: &nsim_trap_data->trap_lock ->pool_lock#2 ->&c->lock ->crngs.lock ->&nsim_dev->fa_cookie_lock ->&obj_hash[i].lock ->&____s->seqcount ->&n->list_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->&base->lock FD: 1 BD: 21 +...: &nsim_dev->fa_cookie_lock FD: 339 BD: 52 +.+.: &wg->device_update_lock ->&wg->static_identity.lock ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->pcpu_alloc_mutex ->&handshake->lock ->&obj_hash[i].lock ->tk_core.seq.seqcount ->&table->lock ->&peer->endpoint_lock ->pool_lock ->&rq->__lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&dir->lock ->k-slock-AF_INET/1 ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->cpu_hotplug_lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&wg->socket_update_lock ->rcu_node_0 ->&list->lock#14 ->&rnp->exp_wq[0] ->&rnp->exp_wq[1] ->&____s->seqcount#2 ->&rnp->exp_wq[2] ->&peer->keypairs.keypair_update_lock ->&base->lock ->&rnp->exp_wq[3] ->batched_entropy_u8.lock ->kfence_freelist_lock ->&x->wait#2 ->&table->hash[i].lock ->k-clock-AF_INET ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->k-clock-AF_INET6 ->(&peer->timer_retransmit_handshake) ->(&peer->timer_send_keepalive) ->(&peer->timer_new_handshake) ->(&peer->timer_zero_key_material) ->(&peer->timer_persistent_keepalive) ->(work_completion)(&peer->clear_peer_work) ->&wq->mutex ->napi_hash_lock ->&table->lock#2 ->wq_pool_mutex ->wq_mayday_lock ->&p->pi_lock ->&x->wait ->pcpu_lock ->&r->consumer_lock#2 ->rcu_state.barrier_mutex ->init_lock ->&cfs_rq->removed.lock ->&n->list_lock ->rcu_state.barrier_mutex.wait_lock ->quarantine_lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->key ->percpu_counters_lock ->stock_lock ->remove_cache_srcu ->&rcu_state.expedited_wq ->(wq_completion)wg-crypt-wg0#150 ->(wq_completion)wg-kex-wg0#295 ->(wq_completion)wg-crypt-wg2#151 ->(wq_completion)wg-kex-wg2#305 ->(wq_completion)wg-kex-wg2#304 ->(wq_completion)wg-crypt-wg1#154 ->(wq_completion)wg-kex-wg1#308 ->(wq_completion)wg-kex-wg1#307 ->(wq_completion)wg-crypt-wg0#160 ->(wq_completion)wg-kex-wg0#315 ->(wq_completion)wg-kex-wg0#316 ->(wq_completion)wg-crypt-wg2#152 ->(wq_completion)wg-kex-wg2#307 ->(wq_completion)wg-kex-wg2#306 ->(wq_completion)wg-crypt-wg1#155 ->(wq_completion)wg-kex-wg1#309 ->(wq_completion)wg-kex-wg1#310 ->(wq_completion)wg-crypt-wg0#161 ->(wq_completion)wg-kex-wg0#317 ->(wq_completion)wg-kex-wg0#318 FD: 67 
BD: 118 ++++: &wg->static_identity.lock ->&handshake->lock ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->tk_core.seq.seqcount ->&peer->keypairs.keypair_update_lock FD: 65 BD: 119 ++++: &handshake->lock ->crngs.lock ->tk_core.seq.seqcount ->&table->lock#2 ->&rq->__lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&sem->wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->rcu_node_0 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq FD: 1 BD: 53 +.+.: &table->lock FD: 58 BD: 121 ++-.: &peer->endpoint_lock ->pool_lock#2 ->&obj_hash[i].lock FD: 1 BD: 1 +...: &list->lock#22 FD: 31 BD: 1 ..-.: &(&hdev->cmd_timer)->timer FD: 42 BD: 20 +.+.: (work_completion)(&(&hdev->cmd_timer)->work) ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 16 BD: 52 +.-.: (&app->periodic_timer) ->&app->lock FD: 1 BD: 52 +...: _xmit_SIT FD: 1 BD: 5 +.+.: nfc_devlist_mutex.wait_lock FD: 17 BD: 60 +...: &bridge_netdev_addr_lock_key/1 ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 189 BD: 1 +.+.: (wq_completion)hci4#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 40 BD: 52 +.-.: (&brmctx->ip6_own_query.timer) ->&br->multicast_lock FD: 39 BD: 71 +.-.: &br->multicast_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&dir->lock#2 ->deferred_lock ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock FD: 40 BD: 52 +.-.: (&brmctx->ip4_own_query.timer) ->&br->multicast_lock FD: 60 BD: 1 +.-.: (&in_dev->mr_ifc_timer) ->&obj_hash[i].lock ->batched_entropy_u32.lock ->&base->lock FD: 31 BD: 1 ..-.: &(&br->gc_work)->timer FD: 14 BD: 53 +.+.: (work_completion)(&(&br->gc_work)->work) ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 52 +...: _xmit_TUNNEL FD: 17 BD: 52 +...: _xmit_IPGRE ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&n->list_lock FD: 1 BD: 5 +.+.: genl_mutex.wait_lock FD: 1 BD: 52 +...: _xmit_TUNNEL6 FD: 44 BD: 60 +...: &dev_addr_list_lock_key/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 50 BD: 55 +.-.: _xmit_TUNNEL6#2 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 44 BD: 60 +...: &dev_addr_list_lock_key#2/1 ->_xmit_ETHER ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 1 BD: 165 ..-.: &list->lock#12 FD: 40 BD: 1 +.-.: (&pmctx->ip6_own_query.timer) ->&br->multicast_lock FD: 40 BD: 1 +.-.: (&pmctx->ip4_own_query.timer) ->&br->multicast_lock FD: 44 BD: 60 +...: _xmit_ETHER/1 ->_xmit_ETHER ->&c->lock ->&obj_hash[i].lock ->krc.lock ->pool_lock#2 ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 22 BD: 62 +.-.: &hsr->seqnr_lock ->pool_lock#2 ->&obj_hash[i].lock ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 1 BD: 63 +.-.: &new_node->seq_out_lock FD: 23 BD: 52 +.-.: (&hsr->announce_timer) FD: 26 BD: 52 +.+.: &nn->netlink_tap_lock ->&rq->__lock FD: 17 BD: 60 +...: &batadv_netdev_addr_lock_key/1 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 44 BD: 60 +...: 
&vlan_netdev_addr_lock_key/1 ->_xmit_ETHER ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->&____s->seqcount#2 FD: 44 BD: 60 +...: &macvlan_netdev_addr_lock_key/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&____s->seqcount#2 ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 17 BD: 53 +...: &ipvlan->addrs_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 44 BD: 60 +...: &macsec_netdev_addr_lock_key/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 1 BD: 54 +.-.: &list->lock#13 FD: 31 BD: 53 +.+.: (work_completion)(&port->bc_work) ->&list->lock#13 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->&rq->__lock ->&base->lock ->quarantine_lock FD: 16 BD: 52 +...: dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 ->&sch->q.lock FD: 17 BD: 62 +...: key#20 ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 24 BD: 56 +...: &bat_priv->tt.commit_lock ->key#16 ->&bat_priv->softif_vlan_list_lock ->&bat_priv->tt.changes_list_lock ->&bat_priv->tt.last_changeset_lock ->pool_lock#2 ->&bat_priv->tvlv.container_list_lock ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 1 BD: 53 +.+.: &wg->socket_update_lock FD: 7 BD: 100 +.-.: &list->lock#14 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 1 BD: 2 +.+.: nf_sockopt_mutex.wait_lock FD: 67 BD: 74 +.+.: (work_completion)(&peer->transmit_handshake_work) ->tk_core.seq.seqcount ->&wg->static_identity.lock ->&cookie->lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&peer->endpoint_lock ->batched_entropy_u8.lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&____s->seqcount#2 ->&n->list_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->kfence_freelist_lock FD: 1 BD: 122 +.-.: &table->lock#2 FD: 26 BD: 118 ++++: &cookie->lock ->&rq->__lock FD: 1 BD: 2 +...: nr_node_list_lock FD: 1 BD: 99 +.-.: &r->producer_lock#2 FD: 31 BD: 1 ..-.: net/wireless/reg.c:533 FD: 103 BD: 95 +.+.: (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->&r->consumer_lock#2 ->&wg->static_identity.lock ->&peer->endpoint_lock ->tk_core.seq.seqcount ->&cookie->lock ->&handshake->lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&list->lock#14 ->&c->lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&sem->wait_lock ->&p->pi_lock FD: 1 BD: 96 +.+.: &r->consumer_lock#2 FD: 5 BD: 120 +.-.: &peer->keypairs.keypair_update_lock ->&table->lock#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 13 BD: 1 +...: &nr_netdev_xmit_lock_key ->nr_node_list_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 223 BD: 3 +.+.: device_hotplug_lock ->(console_sem).lock ->wq_pool_mutex ->tasklist_lock ->&rq->__lock ->console_owner_lock ->console_owner ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&mm->mmap_lock ->swap_lock FD: 27 BD: 74 +.+.: (work_completion)(&peer->transmit_packet_work) ->&obj_hash[i].lock 
->&peer->endpoint_lock ->&base->lock ->batched_entropy_u8.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->&table->lock#2 FD: 1 BD: 172 +.+.: pcpu_alloc_mutex.wait_lock FD: 1 BD: 1 +.-.: &keypair->receiving_counter.lock FD: 6 BD: 53 +...: _xmit_SLIP#2 ->&eql->queue.lock FD: 7 BD: 54 +...: _xmit_X25#2 ->&lapbeth->up_lock FD: 30 BD: 1 +.-.: (&timer) ->&obj_hash[i].lock ->&base->lock ->&txlock ->&txwq FD: 1 BD: 15 +.+.: &data->mtx FD: 1 BD: 56 ....: &wdev->event_lock FD: 1 BD: 56 ....: (&dwork->timer) FD: 1 BD: 56 ....: (&dwork->timer)#2 FD: 1 BD: 56 +.+.: (work_completion)(&(&link->color_collision_detect_work)->work) FD: 1 BD: 57 ..-.: &list->lock#15 FD: 1 BD: 56 +.-.: &ifibss->incomplete_lock FD: 714 BD: 1 +.+.: (wq_completion)cfg80211 ->(work_completion)(&rdev->event_work) ->(work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->(work_completion)(&barr->work) FD: 399 BD: 6 +.+.: (work_completion)(&rdev->event_work) ->&rdev->wiphy.mtx ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 36 BD: 2 +.+.: wireless_nlevent_work ->net_rwsem FD: 1 BD: 87 +.-.: &local->active_txq_lock[i] FD: 39 BD: 86 +.-.: &local->handle_wake_tx_queue_lock ->&local->active_txq_lock[i] ->&local->queue_stop_reason_lock ->&fq->lock ->tk_core.seq.seqcount ->hwsim_radio_lock ->&list->lock#16 FD: 1 BD: 87 ..-.: &local->queue_stop_reason_lock FD: 1 BD: 89 ..-.: &list->lock#16 FD: 98 BD: 1 +.+.: &type->s_umount_key#46/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#32 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&____s->seqcount ->binderfs_minors_mutex ->&dentry->d_lock ->&sb->s_type->i_mutex_key#18 ->&c->lock ->&____s->seqcount#2 ->&rq->__lock ->&n->list_lock FD: 38 BD: 4 +.+.: &sb->s_type->i_lock_key#32 ->&dentry->d_lock FD: 28 BD: 3 +.+.: binderfs_minors_mutex ->binderfs_minors.xa_lock ->&rq->__lock ->rcu_node_0 FD: 1 BD: 4 ....: binderfs_minors.xa_lock FD: 78 BD: 2 +.+.: &sb->s_type->i_mutex_key#18 ->&sb->s_type->i_lock_key#32 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&____s->seqcount#2 ->&rq->__lock ->&n->list_lock ->remove_cache_srcu FD: 1 BD: 3 +.+.: iunique_lock FD: 649 BD: 2 +.+.: &type->i_mutex_dir_key#6/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->tomoyo_ss ->&root->kernfs_iattr_rwsem ->cgroup_mutex FD: 66 BD: 1 .+.+: kn->active#48 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->remove_cache_srcu ->&____s->seqcount#2 ->&rq->__lock ->&____s->seqcount FD: 66 BD: 1 ++++: kn->active#49 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->&n->list_lock ->&____s->seqcount ->&____s->seqcount#2 ->remove_cache_srcu ->&rq->__lock FD: 3 BD: 101 ..-.: cgroup_threadgroup_rwsem.rss.gp_wait.lock ->&obj_hash[i].lock FD: 33 BD: 1 +.-.: &local->rx_path_lock ->&list->lock#15 ->&rdev->wiphy_work_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 17 BD: 56 +...: &sta->lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 17 BD: 56 +...: &sta->rate_ctrl_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 1 BD: 62 +...: &entry->crc_lock FD: 27 BD: 101 ....: cgroup_threadgroup_rwsem.waiters.lock ->&p->pi_lock FD: 1 
BD: 16 +.+.: (wq_completion)cpuset_migrate_mm FD: 649 BD: 2 +.+.: &type->i_mutex_dir_key#7/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->tomoyo_ss ->&root->kernfs_iattr_rwsem ->cgroup_mutex ->pool_lock#2 ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->cgroup_mutex.wait_lock ->&p->pi_lock ->&sb->s_type->i_lock_key#31 FD: 68 BD: 1 ++++: kn->active#50 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->stock_lock ->&c->lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 102 ....: cpuset_attach_wq.lock FD: 2 BD: 636 ..-.: stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 67 BD: 1 .+.+: kn->active#51 ->fs_reclaim ->stock_lock ->pool_lock#2 ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock FD: 68 BD: 1 .+.+: kn->active#52 ->fs_reclaim ->stock_lock ->&kernfs_locks->open_file_mutex[count] ->memcg_max_mutex ->&c->lock ->&n->list_lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 8 +.+.: memcg_max_mutex FD: 1 BD: 3 +.+.: &aux->poke_mutex FD: 1 BD: 6 ....: &per_cpu(xt_recseq, i) FD: 2 BD: 1 +.-.: (&tun->flow_gc_timer) ->&tun->lock FD: 1 BD: 53 +.-.: &tun->lock FD: 255 BD: 5 +.+.: nf_nat_proto_mutex ->fs_reclaim ->pool_lock#2 ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->stock_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->krc.lock ->&n->list_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 10 BD: 57 +...: &bat_priv->tt.last_changeset_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock FD: 264 BD: 1 +.+.: loop_validate_mutex ->&lo->lo_mutex ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock ->loop_validate_mutex.wait_lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount ->pool_lock#2 ->&cfs_rq->removed.lock FD: 96 BD: 3 ++++: &ids->rwsem ->fs_reclaim ->stock_lock ->pool_lock#2 ->lock ->&obj_hash[i].lock ->&c->lock ->rcu_node_0 ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&cfs_rq->removed.lock ->&sem->wait_lock ->&p->pi_lock FD: 47 BD: 198 +.+.: &new->lock#2 ->&c->lock ->pool_lock#2 ->&undo_list->lock ->&obj_hash[i].lock ->krc.lock ->stock_lock ->&n->list_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->(console_sem).lock ->&____s->seqcount#2 ->&____s->seqcount ->&sma->sems[i].lock ->quarantine_lock FD: 209 BD: 1 +.+.: uuid_mutex ->fs_reclaim ->&c->lock ->pool_lock#2 ->&type->i_mutex_dir_key#3 ->&obj_hash[i].lock ->&rq->__lock FD: 1 BD: 96 +.+.: gdp_mutex.wait_lock FD: 1 BD: 1 +.-.: rlock-AF_INET FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#44 ->&rq->__lock FD: 1 BD: 57 ....: key#25 FD: 2 BD: 6 +.+.: (work_completion)(&(&fw_cache.work)->work) ->&fw_cache.name_lock FD: 13 BD: 199 +...: map_idr_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 2 +.+.: misc_mtx.wait_lock FD: 41 BD: 2 +.+.: (work_completion)(&map->work) ->stock_lock ->&obj_hash[i].lock ->&rq->__lock ->pool_lock#2 ->quarantine_lock ->vmap_area_lock ->purge_vmap_area_lock ->pcpu_lock ->&x->wait#2 ->&cfs_rq->removed.lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 27 BD: 1 +.+.: sk_lock-AF_NFC/1 ->slock-AF_NFC ->&rq->__lock FD: 1 BD: 1 +.+.: 
(work_completion)(&udc->vbus_work) FD: 1 BD: 2 +...: clock-AF_ROSE FD: 31 BD: 2 +.+.: sk_lock-AF_ROSE ->slock-AF_ROSE ->rose_list_lock ->&obj_hash[i].lock ->rlock-AF_ROSE ->&rq->__lock ->rose_node_list_lock FD: 1 BD: 3 +...: slock-AF_ROSE FD: 1 BD: 3 +...: rose_list_lock FD: 1 BD: 3 ....: rlock-AF_ROSE FD: 15 BD: 52 +.-.: (&hsr->prune_timer) ->&hsr->list_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 62 .+.-: &table->lock#3 FD: 1 BD: 7 +.+.: usbfs_mutex FD: 1 BD: 5 +.+.: &bus->devnum_next_mutex FD: 1 BD: 1 +...: btf_idr_lock FD: 41 BD: 2 ..-.: &dev->lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->&c->lock ->&n->list_lock FD: 1 BD: 1 ....: &(&ctx->fallback_work)->timer FD: 27 BD: 115 -.-.: &rq_wait->wait ->&p->pi_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#398 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#672 ->&rq->__lock FD: 1 BD: 2 +...: clock-AF_IEEE802154 FD: 31 BD: 1 ..-.: &(&conn->info_timer)->timer FD: 2 BD: 24 +.+.: (work_completion)(&(&conn->info_timer)->work) ->&conn->chan_lock FD: 42 BD: 2 +.+.: fqdir_free_work ->rcu_state.barrier_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rcu_state.barrier_mutex.wait_lock ->&p->pi_lock ->&base->lock ->&rq->__lock ->quarantine_lock FD: 1 BD: 3 +.+.: &vmpr->events_lock FD: 1 BD: 1 +.+.: &map->freeze_mutex FD: 219 BD: 2 +.+.: sk_lock-AF_NFC ->slock-AF_NFC ->&k->list_lock ->&k->k_lock ->llcp_devices_lock ->&rq->__lock ->&local->raw_sockets.lock ->fs_reclaim ->pool_lock#2 ->&local->sdp_lock ->&local->sockets.lock ->&mm->mmap_lock ->&c->lock FD: 80 BD: 104 +.+.: oom_lock ->css_set_lock ->&p->alloc_lock ->(console_sem).lock ->&rq->__lock ->kernfs_pr_cont_lock ->cgroup_rstat_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 10 ....: &lo->lo_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#393 ->&rq->__lock FD: 1 BD: 2 ....: &q->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#484 FD: 1 BD: 5 +.+.: (work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) FD: 1 BD: 5 +.+.: &hn->hn_lock FD: 5 BD: 5 +.+.: &bat_priv->bat_v.ogm_buff_mutex ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock FD: 1 BD: 5 +...: &bat_priv->gw.list_lock FD: 1 BD: 2 +...: clock-AF_KEY FD: 76 BD: 1 +.+.: (wq_completion)inet_frag_wq ->(work_completion)(&fqdir->destroy_work) FD: 75 BD: 2 +.+.: (work_completion)(&fqdir->destroy_work) ->(work_completion)(&ht->run_work) ->&ht->mutex ->&rq->__lock FD: 49 BD: 52 +.+.: &caifn->caifdevs.lock ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->pool_lock#2 ->&this->info_list_lock ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->pool_lock ->&cfs_rq->removed.lock ->&rnp->exp_wq[2] ->&rnp->exp_lock ->rcu_state.exp_mutex FD: 1 BD: 114 +.-.: &nf_nat_locks[i] FD: 31 BD: 1 +.-.: (&sdp->delay_work) FD: 4 BD: 5 +...: vsock_table_lock ->batched_entropy_u32.lock FD: 36 BD: 53 +.-.: (&peer->timer_persistent_keepalive) ->pool_lock#2 ->&list->lock#14 ->tk_core.seq.seqcount ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->init_task.mems_allowed_seq.seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 219 BD: 4 +.+.: sk_lock-AF_VSOCK ->slock-AF_VSOCK ->&mm->mmap_lock ->vsock_table_lock ->clock-AF_VSOCK ->&rq->__lock ->fs_reclaim ->pool_lock#2 ->&vvs->rx_lock ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->&list->lock#21 ->&ei->socket.wq.wait ->&vvs->tx_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu 
->&cfs_rq->removed.lock ->&meta->lock ->quarantine_lock FD: 28 BD: 5 +...: slock-AF_VSOCK ->&sk->sk_lock.wq FD: 1 BD: 5 +...: clock-AF_VSOCK FD: 1 BD: 2 +...: clock-AF_NFC FD: 1956 BD: 1 +.+.: &ndev->req_lock ->&wq->mutex ->(&ndev->cmd_timer) ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&ndev->data_timer) ->(wq_completion)nfc2_nci_cmd_wq#375 ->(wq_completion)nfc3_nci_cmd_wq#164 ->(wq_completion)nfc2_nci_cmd_wq#376 ->(wq_completion)nfc3_nci_cmd_wq#165 ->(wq_completion)nfc2_nci_cmd_wq#377 ->(wq_completion)nfc2_nci_cmd_wq#378 ->(wq_completion)nfc3_nci_cmd_wq#166 ->(wq_completion)nfc2_nci_cmd_wq#379 ->(wq_completion)nfc2_nci_cmd_wq#380 ->(wq_completion)nfc3_nci_cmd_wq#167 ->(wq_completion)nfc4_nci_cmd_wq#37 ->(wq_completion)nfc3_nci_cmd_wq#168 ->(wq_completion)nfc4_nci_cmd_wq#38 ->(wq_completion)nfc3_nci_cmd_wq#169 ->(wq_completion)nfc3_nci_cmd_wq#170 ->(wq_completion)nfc3_nci_cmd_wq#171 ->(wq_completion)nfc3_nci_cmd_wq#172 ->(wq_completion)nfc3_nci_cmd_wq#173 ->(wq_completion)nfc3_nci_cmd_wq#174 ->(wq_completion)nfc2_nci_cmd_wq#381 ->(wq_completion)nfc2_nci_cmd_wq#382 ->(wq_completion)nfc3_nci_cmd_wq#175 ->(wq_completion)nfc5_nci_cmd_wq#13 ->(wq_completion)nfc6_nci_cmd_wq#3 ->(wq_completion)nfc9_nci_cmd_wq#4 ->(wq_completion)nfc11_nci_cmd_wq#3 ->(wq_completion)nfc16_nci_cmd_wq#3 ->(wq_completion)nfc16_nci_cmd_wq#4 ->(wq_completion)nfc22_nci_cmd_wq#3 ->(wq_completion)nfc21_nci_cmd_wq#5 ->(wq_completion)nfc25_nci_cmd_wq#4 ->(wq_completion)nfc25_nci_cmd_wq#5 ->(wq_completion)nfc2_nci_cmd_wq#383 ->(wq_completion)nfc3_nci_cmd_wq#176 ->(wq_completion)nfc4_nci_cmd_wq#39 ->(wq_completion)nfc7_nci_cmd_wq#3 ->(wq_completion)nfc5_nci_cmd_wq#14 ->(wq_completion)nfc6_nci_cmd_wq#4 ->(wq_completion)nfc8_nci_cmd_wq#4 ->(wq_completion)nfc10_nci_cmd_wq#4 ->(wq_completion)nfc9_nci_cmd_wq#5 ->(wq_completion)nfc12_nci_cmd_wq#4 ->(wq_completion)nfc11_nci_cmd_wq#4 ->(wq_completion)nfc13_nci_cmd_wq#3 ->(wq_completion)nfc14_nci_cmd_wq#4 ->(wq_completion)nfc15_nci_cmd_wq#4 ->(wq_completion)nfc17_nci_cmd_wq#7 ->(wq_completion)nfc16_nci_cmd_wq#5 ->(wq_completion)nfc18_nci_cmd_wq#5 ->(wq_completion)nfc19_nci_cmd_wq#3 ->(wq_completion)nfc20_nci_cmd_wq#3 ->(wq_completion)nfc23_nci_cmd_wq#5 ->(wq_completion)nfc21_nci_cmd_wq#6 ->(wq_completion)nfc22_nci_cmd_wq#4 ->(wq_completion)nfc24_nci_cmd_wq#3 ->(wq_completion)nfc26_nci_cmd_wq#5 ->(wq_completion)nfc27_nci_cmd_wq#4 ->(wq_completion)nfc25_nci_cmd_wq#6 ->(wq_completion)nfc28_nci_cmd_wq#3 ->(wq_completion)nfc34_nci_cmd_wq#3 ->(wq_completion)nfc33_nci_cmd_wq#3 ->(wq_completion)nfc32_nci_cmd_wq#3 ->(wq_completion)nfc31_nci_cmd_wq#5 ->(wq_completion)nfc30_nci_cmd_wq#3 ->(wq_completion)nfc29_nci_cmd_wq#3 ->(wq_completion)nfc2_nci_cmd_wq#384 ->(wq_completion)nfc3_nci_cmd_wq#177 ->(wq_completion)nfc2_nci_cmd_wq#385 ->(wq_completion)nfc3_nci_cmd_wq#178 ->(wq_completion)nfc3_nci_cmd_wq#179 ->(wq_completion)nfc4_nci_cmd_wq#40 ->(wq_completion)nfc5_nci_cmd_wq#15 ->(wq_completion)nfc6_nci_cmd_wq#5 ->(wq_completion)nfc2_nci_cmd_wq#386 ->(wq_completion)nfc3_nci_cmd_wq#180 ->(wq_completion)nfc5_nci_cmd_wq#16 ->(wq_completion)nfc4_nci_cmd_wq#41 ->(wq_completion)nfc7_nci_cmd_wq#4 ->(wq_completion)nfc2_nci_cmd_wq#387 ->(wq_completion)nfc2_nci_cmd_wq#388 ->(wq_completion)nfc2_nci_cmd_wq#389 ->(wq_completion)nfc3_nci_cmd_wq#181 ->(wq_completion)nfc2_nci_cmd_wq#390 ->(wq_completion)nfc2_nci_cmd_wq#391 ->(wq_completion)nfc4_nci_cmd_wq#42 ->(wq_completion)nfc5_nci_cmd_wq#17 ->(wq_completion)nfc3_nci_cmd_wq#182 ->(wq_completion)nfc2_nci_cmd_wq#392 ->(wq_completion)nfc3_nci_cmd_wq#183 
->(wq_completion)nfc4_nci_cmd_wq#43 ->(wq_completion)nfc2_nci_cmd_wq#393 ->(wq_completion)nfc3_nci_cmd_wq#184 ->(wq_completion)nfc4_nci_cmd_wq#44 ->(wq_completion)nfc2_nci_cmd_wq#394 ->(wq_completion)nfc2_nci_cmd_wq#395 ->(wq_completion)nfc3_nci_cmd_wq#185 ->(wq_completion)nfc2_nci_cmd_wq#396 ->(wq_completion)nfc2_nci_cmd_wq#397 ->(wq_completion)nfc3_nci_cmd_wq#186 ->(wq_completion)nfc2_nci_cmd_wq#398 ->(wq_completion)nfc3_nci_cmd_wq#187 ->(wq_completion)nfc2_nci_cmd_wq#399 ->(wq_completion)nfc3_nci_cmd_wq#189 ->(wq_completion)nfc2_nci_cmd_wq#400 ->(wq_completion)nfc2_nci_cmd_wq#401 ->(wq_completion)nfc2_nci_cmd_wq#402 ->(wq_completion)nfc3_nci_cmd_wq#190 ->(wq_completion)nfc2_nci_cmd_wq#403 ->(wq_completion)nfc2_nci_cmd_wq#404 ->(wq_completion)nfc3_nci_cmd_wq#191 ->(wq_completion)nfc2_nci_cmd_wq#405 ->(wq_completion)nfc3_nci_cmd_wq#192 ->(wq_completion)nfc4_nci_cmd_wq#45 ->(wq_completion)nfc2_nci_cmd_wq#406 ->(wq_completion)nfc2_nci_cmd_wq#407 ->(wq_completion)nfc3_nci_cmd_wq#193 ->(wq_completion)nfc2_nci_cmd_wq#408 ->(wq_completion)nfc2_nci_cmd_wq#409 ->(wq_completion)nfc3_nci_cmd_wq#194 ->(wq_completion)nfc2_nci_cmd_wq#410 ->(wq_completion)nfc2_nci_cmd_wq#411 ->(wq_completion)nfc3_nci_cmd_wq#195 ->(wq_completion)nfc2_nci_cmd_wq#412 ->(wq_completion)nfc3_nci_cmd_wq#196 ->(wq_completion)nfc4_nci_cmd_wq#46 ->(wq_completion)nfc2_nci_cmd_wq#413 ->(wq_completion)nfc2_nci_cmd_wq#414 ->(wq_completion)nfc3_nci_cmd_wq#198 ->(wq_completion)nfc4_nci_cmd_wq#47 ->(wq_completion)nfc3_nci_cmd_wq#199 ->(wq_completion)nfc4_nci_cmd_wq#48 ->(wq_completion)nfc2_nci_cmd_wq#415 ->(wq_completion)nfc5_nci_cmd_wq#18 ->(wq_completion)nfc2_nci_cmd_wq#416 ->(wq_completion)nfc3_nci_cmd_wq#200 ->(wq_completion)nfc2_nci_cmd_wq#417 ->(wq_completion)nfc4_nci_cmd_wq#49 ->(wq_completion)nfc5_nci_cmd_wq#19 ->(wq_completion)nfc2_nci_cmd_wq#418 ->(wq_completion)nfc4_nci_cmd_wq#51 ->(wq_completion)nfc2_nci_cmd_wq#419 ->(wq_completion)nfc3_nci_cmd_wq#201 ->(wq_completion)nfc4_nci_cmd_wq#52 ->(wq_completion)nfc2_nci_cmd_wq#420 ->(wq_completion)nfc3_nci_cmd_wq#202 ->(wq_completion)nfc2_nci_cmd_wq#421 ->(wq_completion)nfc4_nci_cmd_wq#53 ->(wq_completion)nfc2_nci_cmd_wq#422 ->(wq_completion)nfc2_nci_cmd_wq#423 ->(wq_completion)nfc3_nci_cmd_wq#203 ->(wq_completion)nfc4_nci_cmd_wq#54 ->(wq_completion)nfc5_nci_cmd_wq#21 ->(wq_completion)nfc2_nci_cmd_wq#424 ->(wq_completion)nfc3_nci_cmd_wq#204 ->(wq_completion)nfc2_nci_cmd_wq#425 ->(wq_completion)nfc3_nci_cmd_wq#205 ->(wq_completion)nfc2_nci_cmd_wq#426 ->(wq_completion)nfc3_nci_cmd_wq#206 ->(wq_completion)nfc3_nci_cmd_wq#207 ->(wq_completion)nfc2_nci_cmd_wq#427 ->(wq_completion)nfc2_nci_cmd_wq#428 ->(wq_completion)nfc3_nci_cmd_wq#209 ->(wq_completion)nfc4_nci_cmd_wq#55 ->(wq_completion)nfc2_nci_cmd_wq#429 ->(wq_completion)nfc3_nci_cmd_wq#210 ->(wq_completion)nfc2_nci_cmd_wq#430 ->(wq_completion)nfc3_nci_cmd_wq#211 ->(wq_completion)nfc3_nci_cmd_wq#212 ->(wq_completion)nfc2_nci_cmd_wq#431 ->(wq_completion)nfc3_nci_cmd_wq#213 ->(wq_completion)nfc4_nci_cmd_wq#56 ->(wq_completion)nfc5_nci_cmd_wq#22 ->(wq_completion)nfc2_nci_cmd_wq#432 ->(wq_completion)nfc2_nci_cmd_wq#433 ->(wq_completion)nfc3_nci_cmd_wq#214 ->(wq_completion)nfc2_nci_cmd_wq#434 ->(wq_completion)nfc3_nci_cmd_wq#215 ->(wq_completion)nfc2_nci_cmd_wq#435 ->(wq_completion)nfc3_nci_cmd_wq#216 ->(wq_completion)nfc2_nci_cmd_wq#436 ->(wq_completion)nfc3_nci_cmd_wq#217 ->(wq_completion)nfc4_nci_cmd_wq#57 ->(wq_completion)nfc2_nci_cmd_wq#437 ->(wq_completion)nfc3_nci_cmd_wq#218 ->(wq_completion)nfc4_nci_cmd_wq#58 
->(wq_completion)nfc2_nci_cmd_wq#438 ->(wq_completion)nfc2_nci_cmd_wq#439 ->(wq_completion)nfc3_nci_cmd_wq#219 ->(wq_completion)nfc2_nci_cmd_wq#440 ->(wq_completion)nfc3_nci_cmd_wq#220 ->(wq_completion)nfc2_nci_cmd_wq#441 ->(wq_completion)nfc2_nci_cmd_wq#442 ->(wq_completion)nfc3_nci_cmd_wq#221 ->(wq_completion)nfc2_nci_cmd_wq#443 ->(wq_completion)nfc3_nci_cmd_wq#222 ->(wq_completion)nfc2_nci_cmd_wq#444 ->(wq_completion)nfc3_nci_cmd_wq#223 ->(wq_completion)nfc4_nci_cmd_wq#59 ->(wq_completion)nfc2_nci_cmd_wq#445 ->(wq_completion)nfc2_nci_cmd_wq#446 ->(wq_completion)nfc3_nci_cmd_wq#225 ->(wq_completion)nfc2_nci_cmd_wq#447 ->(wq_completion)nfc3_nci_cmd_wq#226 ->(wq_completion)nfc2_nci_cmd_wq#448 ->(wq_completion)nfc2_nci_cmd_wq#449 ->(wq_completion)nfc3_nci_cmd_wq#227 ->(wq_completion)nfc2_nci_cmd_wq#450 ->(wq_completion)nfc4_nci_cmd_wq#60 ->(wq_completion)nfc2_nci_cmd_wq#451 ->(wq_completion)nfc2_nci_cmd_wq#452 ->(wq_completion)nfc3_nci_cmd_wq#228 ->(wq_completion)nfc2_nci_cmd_wq#453 ->(wq_completion)nfc4_nci_cmd_wq#61 ->(wq_completion)nfc3_nci_cmd_wq#229 ->(wq_completion)nfc2_nci_cmd_wq#454 ->(wq_completion)nfc3_nci_cmd_wq#230 ->(wq_completion)nfc4_nci_cmd_wq#62 ->(wq_completion)nfc2_nci_cmd_wq#455 ->(wq_completion)nfc3_nci_cmd_wq#231 ->(wq_completion)nfc2_nci_cmd_wq#456 ->(wq_completion)nfc3_nci_cmd_wq#232 ->(wq_completion)nfc2_nci_cmd_wq#457 ->(wq_completion)nfc2_nci_cmd_wq#458 ->(wq_completion)nfc2_nci_cmd_wq#459 ->(wq_completion)nfc2_nci_cmd_wq#460 ->(wq_completion)nfc3_nci_cmd_wq#233 ->(wq_completion)nfc2_nci_cmd_wq#461 ->(wq_completion)nfc3_nci_cmd_wq#234 ->(wq_completion)nfc2_nci_cmd_wq#462 ->(wq_completion)nfc3_nci_cmd_wq#235 ->(wq_completion)nfc4_nci_cmd_wq#63 ->(wq_completion)nfc2_nci_cmd_wq#463 ->(wq_completion)nfc3_nci_cmd_wq#236 ->(wq_completion)nfc2_nci_cmd_wq#464 ->(wq_completion)nfc2_nci_cmd_wq#465 ->(wq_completion)nfc2_nci_cmd_wq#466 ->(wq_completion)nfc2_nci_cmd_wq#467 ->(wq_completion)nfc3_nci_cmd_wq#237 ->(wq_completion)nfc4_nci_cmd_wq#64 ->(wq_completion)nfc5_nci_cmd_wq#23 ->(wq_completion)nfc2_nci_cmd_wq#468 ->(wq_completion)nfc2_nci_cmd_wq#469 ->(wq_completion)nfc2_nci_cmd_wq#470 ->(wq_completion)nfc3_nci_cmd_wq#238 ->(wq_completion)nfc4_nci_cmd_wq#65 ->(wq_completion)nfc2_nci_cmd_wq#471 ->(wq_completion)nfc3_nci_cmd_wq#239 ->(wq_completion)nfc2_nci_cmd_wq#472 ->(wq_completion)nfc3_nci_cmd_wq#240 ->(wq_completion)nfc2_nci_cmd_wq#473 ->(wq_completion)nfc3_nci_cmd_wq#241 ->(wq_completion)nfc2_nci_cmd_wq#474 ->(wq_completion)nfc3_nci_cmd_wq#242 ->(wq_completion)nfc2_nci_cmd_wq#475 ->(wq_completion)nfc3_nci_cmd_wq#243 ->(wq_completion)nfc2_nci_cmd_wq#476 ->(wq_completion)nfc4_nci_cmd_wq#66 ->(wq_completion)nfc2_nci_cmd_wq#477 ->(wq_completion)nfc2_nci_cmd_wq#478 ->(wq_completion)nfc2_nci_cmd_wq#479 ->(wq_completion)nfc3_nci_cmd_wq#244 ->(wq_completion)nfc4_nci_cmd_wq#67 ->(wq_completion)nfc2_nci_cmd_wq#480 ->(wq_completion)nfc3_nci_cmd_wq#245 ->(wq_completion)nfc2_nci_cmd_wq#481 ->(wq_completion)nfc4_nci_cmd_wq#68 ->(wq_completion)nfc2_nci_cmd_wq#482 ->(wq_completion)nfc3_nci_cmd_wq#246 ->(wq_completion)nfc2_nci_cmd_wq#483 ->(wq_completion)nfc4_nci_cmd_wq#69 ->(wq_completion)nfc5_nci_cmd_wq#24 ->(wq_completion)nfc2_nci_cmd_wq#484 ->(wq_completion)nfc3_nci_cmd_wq#247 ->(wq_completion)nfc2_nci_cmd_wq#485 ->(wq_completion)nfc4_nci_cmd_wq#70 ->(wq_completion)nfc3_nci_cmd_wq#248 ->(wq_completion)nfc2_nci_cmd_wq#486 ->(wq_completion)nfc2_nci_cmd_wq#487 ->(wq_completion)nfc3_nci_cmd_wq#249 ->(wq_completion)nfc2_nci_cmd_wq#488 ->(wq_completion)nfc4_nci_cmd_wq#71 
->(wq_completion)nfc2_nci_cmd_wq#489 ->(wq_completion)nfc2_nci_cmd_wq#490 ->(wq_completion)nfc3_nci_cmd_wq#250 ->(wq_completion)nfc4_nci_cmd_wq#72 ->(wq_completion)nfc3_nci_cmd_wq#251 ->(wq_completion)nfc2_nci_cmd_wq#491 ->(wq_completion)nfc4_nci_cmd_wq#73 ->(wq_completion)nfc2_nci_cmd_wq#492 ->(wq_completion)nfc2_nci_cmd_wq#493 ->(wq_completion)nfc3_nci_cmd_wq#252 ->(wq_completion)nfc2_nci_cmd_wq#494 ->(wq_completion)nfc3_nci_cmd_wq#253 ->(wq_completion)nfc2_nci_cmd_wq#495 ->(wq_completion)nfc2_nci_cmd_wq#496 ->(wq_completion)nfc3_nci_cmd_wq#254 ->(wq_completion)nfc2_nci_cmd_wq#497 ->(wq_completion)nfc4_nci_cmd_wq#74 ->(wq_completion)nfc2_nci_cmd_wq#498 ->(wq_completion)nfc3_nci_cmd_wq#255 ->(wq_completion)nfc2_nci_cmd_wq#499 ->(wq_completion)nfc4_nci_cmd_wq#75 ->(wq_completion)nfc5_nci_cmd_wq#25 ->(wq_completion)nfc2_nci_cmd_wq#500 ->(wq_completion)nfc2_nci_cmd_wq#501 ->(wq_completion)nfc3_nci_cmd_wq#256 ->(wq_completion)nfc2_nci_cmd_wq#502 ->(wq_completion)nfc4_nci_cmd_wq#76 ->(wq_completion)nfc2_nci_cmd_wq#503 ->(wq_completion)nfc4_nci_cmd_wq#77 ->(wq_completion)nfc2_nci_cmd_wq#504 ->(wq_completion)nfc6_nci_cmd_wq#6 ->(wq_completion)nfc3_nci_cmd_wq#257 ->(wq_completion)nfc5_nci_cmd_wq#26 ->(wq_completion)nfc2_nci_cmd_wq#505 ->(wq_completion)nfc3_nci_cmd_wq#258 ->(wq_completion)nfc4_nci_cmd_wq#78 ->(wq_completion)nfc2_nci_cmd_wq#506 ->(wq_completion)nfc3_nci_cmd_wq#259 ->(wq_completion)nfc5_nci_cmd_wq#27 ->(wq_completion)nfc2_nci_cmd_wq#507 ->(wq_completion)nfc4_nci_cmd_wq#79 ->(wq_completion)nfc3_nci_cmd_wq#260 ->(wq_completion)nfc4_nci_cmd_wq#80 ->(wq_completion)nfc3_nci_cmd_wq#261 ->(wq_completion)nfc2_nci_cmd_wq#508 ->(wq_completion)nfc4_nci_cmd_wq#81 ->(wq_completion)nfc3_nci_cmd_wq#262 ->(wq_completion)nfc3_nci_cmd_wq#263 ->(wq_completion)nfc2_nci_cmd_wq#509 ->(wq_completion)nfc3_nci_cmd_wq#264 ->(wq_completion)nfc2_nci_cmd_wq#510 ->(wq_completion)nfc2_nci_cmd_wq#511 ->(wq_completion)nfc3_nci_cmd_wq#265 ->(wq_completion)nfc4_nci_cmd_wq#82 ->(wq_completion)nfc2_nci_cmd_wq#512 ->(wq_completion)nfc2_nci_cmd_wq#513 ->(wq_completion)nfc3_nci_cmd_wq#266 ->(wq_completion)nfc2_nci_cmd_wq#514 ->(wq_completion)nfc3_nci_cmd_wq#267 ->(wq_completion)nfc2_nci_cmd_wq#515 ->(wq_completion)nfc2_nci_cmd_wq#516 ->(wq_completion)nfc3_nci_cmd_wq#268 ->(wq_completion)nfc2_nci_cmd_wq#517 ->(wq_completion)nfc2_nci_cmd_wq#518 ->(wq_completion)nfc3_nci_cmd_wq#269 ->(wq_completion)nfc2_nci_cmd_wq#519 ->(wq_completion)nfc3_nci_cmd_wq#270 ->(wq_completion)nfc3_nci_cmd_wq#271 ->(wq_completion)nfc4_nci_cmd_wq#83 ->(wq_completion)nfc2_nci_cmd_wq#520 ->(wq_completion)nfc2_nci_cmd_wq#521 ->(wq_completion)nfc3_nci_cmd_wq#272 ->(wq_completion)nfc2_nci_cmd_wq#522 ->(wq_completion)nfc4_nci_cmd_wq#84 ->(wq_completion)nfc2_nci_cmd_wq#523 ->(wq_completion)nfc2_nci_cmd_wq#524 ->(wq_completion)nfc3_nci_cmd_wq#273 ->(wq_completion)nfc2_nci_cmd_wq#525 ->(wq_completion)nfc3_nci_cmd_wq#274 ->(wq_completion)nfc2_nci_cmd_wq#526 ->(wq_completion)nfc3_nci_cmd_wq#275 ->(wq_completion)nfc4_nci_cmd_wq#85 ->(wq_completion)nfc5_nci_cmd_wq#28 ->(wq_completion)nfc2_nci_cmd_wq#527 ->(wq_completion)nfc3_nci_cmd_wq#276 ->(wq_completion)nfc3_nci_cmd_wq#277 ->(wq_completion)nfc2_nci_cmd_wq#528 ->(wq_completion)nfc4_nci_cmd_wq#86 ->(wq_completion)nfc3_nci_cmd_wq#278 ->(wq_completion)nfc2_nci_cmd_wq#529 ->(wq_completion)nfc2_nci_cmd_wq#530 ->(wq_completion)nfc3_nci_cmd_wq#279 ->(wq_completion)nfc3_nci_cmd_wq#280 ->(wq_completion)nfc2_nci_cmd_wq#531 ->(wq_completion)nfc4_nci_cmd_wq#87 ->(wq_completion)nfc2_nci_cmd_wq#532 
->(wq_completion)nfc3_nci_cmd_wq#281 ->(wq_completion)nfc3_nci_cmd_wq#282 ->(wq_completion)nfc2_nci_cmd_wq#533 ->(wq_completion)nfc4_nci_cmd_wq#88 ->(wq_completion)nfc3_nci_cmd_wq#283 ->(wq_completion)nfc2_nci_cmd_wq#534 ->(wq_completion)nfc3_nci_cmd_wq#284 ->(wq_completion)nfc2_nci_cmd_wq#535 ->(wq_completion)nfc3_nci_cmd_wq#285 ->(wq_completion)nfc3_nci_cmd_wq#286 ->(wq_completion)nfc2_nci_cmd_wq#536 ->(wq_completion)nfc4_nci_cmd_wq#89 ->(wq_completion)nfc2_nci_cmd_wq#537 ->(wq_completion)nfc3_nci_cmd_wq#287 ->(wq_completion)nfc2_nci_cmd_wq#538 ->(wq_completion)nfc3_nci_cmd_wq#288 ->(wq_completion)nfc2_nci_cmd_wq#539 ->(wq_completion)nfc2_nci_cmd_wq#540 ->(wq_completion)nfc2_nci_cmd_wq#541 ->(wq_completion)nfc3_nci_cmd_wq#289 ->(wq_completion)nfc2_nci_cmd_wq#542 ->(wq_completion)nfc4_nci_cmd_wq#90 ->(wq_completion)nfc5_nci_cmd_wq#29 ->(wq_completion)nfc2_nci_cmd_wq#543 ->(wq_completion)nfc2_nci_cmd_wq#544 ->(wq_completion)nfc2_nci_cmd_wq#545 ->(wq_completion)nfc3_nci_cmd_wq#290 ->(wq_completion)nfc2_nci_cmd_wq#546 ->(wq_completion)nfc3_nci_cmd_wq#291 ->(wq_completion)nfc2_nci_cmd_wq#547 ->(wq_completion)nfc3_nci_cmd_wq#292 ->(wq_completion)nfc2_nci_cmd_wq#548 ->(wq_completion)nfc3_nci_cmd_wq#293 ->(wq_completion)nfc2_nci_cmd_wq#549 ->(wq_completion)nfc4_nci_cmd_wq#91 ->(wq_completion)nfc2_nci_cmd_wq#550 ->(wq_completion)nfc2_nci_cmd_wq#551 ->(wq_completion)nfc2_nci_cmd_wq#552 ->(wq_completion)nfc3_nci_cmd_wq#294 ->(wq_completion)nfc2_nci_cmd_wq#553 ->(wq_completion)nfc2_nci_cmd_wq#554 ->(wq_completion)nfc2_nci_cmd_wq#555 ->(wq_completion)nfc3_nci_cmd_wq#295 ->(wq_completion)nfc3_nci_cmd_wq#296 ->(wq_completion)nfc2_nci_cmd_wq#556 ->(wq_completion)nfc4_nci_cmd_wq#92 ->(wq_completion)nfc2_nci_cmd_wq#557 ->(wq_completion)nfc3_nci_cmd_wq#297 ->(wq_completion)nfc4_nci_cmd_wq#93 ->(wq_completion)nfc2_nci_cmd_wq#558 ->(wq_completion)nfc2_nci_cmd_wq#559 ->(wq_completion)nfc3_nci_cmd_wq#298 ->(wq_completion)nfc2_nci_cmd_wq#560 ->(wq_completion)nfc3_nci_cmd_wq#299 ->(wq_completion)nfc2_nci_cmd_wq#561 ->(wq_completion)nfc3_nci_cmd_wq#300 ->(wq_completion)nfc4_nci_cmd_wq#94 ->(wq_completion)nfc2_nci_cmd_wq#562 ->(wq_completion)nfc3_nci_cmd_wq#301 ->(wq_completion)nfc2_nci_cmd_wq#563 ->(wq_completion)nfc3_nci_cmd_wq#302 ->(wq_completion)nfc2_nci_cmd_wq#564 ->(wq_completion)nfc3_nci_cmd_wq#303 ->(wq_completion)nfc2_nci_cmd_wq#565 ->(wq_completion)nfc3_nci_cmd_wq#304 ->(wq_completion)nfc2_nci_cmd_wq#566 ->(wq_completion)nfc2_nci_cmd_wq#567 ->(wq_completion)nfc2_nci_cmd_wq#568 ->(wq_completion)nfc3_nci_cmd_wq#305 ->(wq_completion)nfc2_nci_cmd_wq#569 ->(wq_completion)nfc2_nci_cmd_wq#570 ->(wq_completion)nfc2_nci_cmd_wq#571 ->(wq_completion)nfc2_nci_cmd_wq#572 ->(wq_completion)nfc3_nci_cmd_wq#306 ->(wq_completion)nfc2_nci_cmd_wq#573 ->(wq_completion)nfc3_nci_cmd_wq#307 ->(wq_completion)nfc2_nci_cmd_wq#574 ->(wq_completion)nfc2_nci_cmd_wq#575 ->(wq_completion)nfc3_nci_cmd_wq#308 ->(wq_completion)nfc2_nci_cmd_wq#576 ->(wq_completion)nfc2_nci_cmd_wq#577 ->(wq_completion)nfc2_nci_cmd_wq#578 ->(wq_completion)nfc2_nci_cmd_wq#579 ->(wq_completion)nfc3_nci_cmd_wq#309 ->(wq_completion)nfc2_nci_cmd_wq#580 ->(wq_completion)nfc3_nci_cmd_wq#310 ->(wq_completion)nfc2_nci_cmd_wq#581 ->(wq_completion)nfc3_nci_cmd_wq#311 ->(wq_completion)nfc2_nci_cmd_wq#582 ->(wq_completion)nfc2_nci_cmd_wq#583 ->(wq_completion)nfc2_nci_cmd_wq#584 ->(wq_completion)nfc3_nci_cmd_wq#312 ->(wq_completion)nfc4_nci_cmd_wq#95 ->(wq_completion)nfc2_nci_cmd_wq#585 ->(wq_completion)nfc3_nci_cmd_wq#313 ->(wq_completion)nfc4_nci_cmd_wq#96 
->(wq_completion)nfc2_nci_cmd_wq#586 ->(wq_completion)nfc2_nci_cmd_wq#587 ->(wq_completion)nfc4_nci_cmd_wq#97 ->(wq_completion)nfc3_nci_cmd_wq#314 ->(wq_completion)nfc5_nci_cmd_wq#30 ->(wq_completion)nfc2_nci_cmd_wq#588 ->(wq_completion)nfc3_nci_cmd_wq#315 ->(wq_completion)nfc2_nci_cmd_wq#589 ->(wq_completion)nfc3_nci_cmd_wq#316 ->(wq_completion)nfc2_nci_cmd_wq#590 ->(wq_completion)nfc2_nci_cmd_wq#591 ->(wq_completion)nfc2_nci_cmd_wq#592 ->(wq_completion)nfc3_nci_cmd_wq#317 ->(wq_completion)nfc4_nci_cmd_wq#98 ->(wq_completion)nfc2_nci_cmd_wq#593 ->(wq_completion)nfc2_nci_cmd_wq#594 ->(wq_completion)nfc3_nci_cmd_wq#318 ->(wq_completion)nfc2_nci_cmd_wq#595 ->(wq_completion)nfc4_nci_cmd_wq#99 ->(wq_completion)nfc2_nci_cmd_wq#596 ->(wq_completion)nfc2_nci_cmd_wq#597 ->(wq_completion)nfc3_nci_cmd_wq#319 ->(wq_completion)nfc2_nci_cmd_wq#598 ->(wq_completion)nfc3_nci_cmd_wq#320 ->(wq_completion)nfc2_nci_cmd_wq#599 ->(wq_completion)nfc2_nci_cmd_wq#600 ->(wq_completion)nfc4_nci_cmd_wq#100 ->(wq_completion)nfc3_nci_cmd_wq#321 ->(wq_completion)nfc2_nci_cmd_wq#601 ->(wq_completion)nfc2_nci_cmd_wq#602 ->(wq_completion)nfc3_nci_cmd_wq#322 ->(wq_completion)nfc2_nci_cmd_wq#603 ->(wq_completion)nfc2_nci_cmd_wq#604 ->(wq_completion)nfc2_nci_cmd_wq#605 ->(wq_completion)nfc3_nci_cmd_wq#323 ->(wq_completion)nfc2_nci_cmd_wq#606 ->(wq_completion)nfc3_nci_cmd_wq#324 ->(wq_completion)nfc4_nci_cmd_wq#101 ->(wq_completion)nfc2_nci_cmd_wq#607 ->(wq_completion)nfc3_nci_cmd_wq#325 ->(wq_completion)nfc2_nci_cmd_wq#608 ->(wq_completion)nfc4_nci_cmd_wq#102 ->(wq_completion)nfc2_nci_cmd_wq#609 ->(wq_completion)nfc2_nci_cmd_wq#610 ->(wq_completion)nfc3_nci_cmd_wq#326 ->(wq_completion)nfc2_nci_cmd_wq#611 ->(wq_completion)nfc2_nci_cmd_wq#612 ->(wq_completion)nfc3_nci_cmd_wq#327 ->(wq_completion)nfc4_nci_cmd_wq#103 ->(wq_completion)nfc2_nci_cmd_wq#613 ->(wq_completion)nfc3_nci_cmd_wq#328 ->(wq_completion)nfc2_nci_cmd_wq#614 ->(wq_completion)nfc2_nci_cmd_wq#615 ->(wq_completion)nfc2_nci_cmd_wq#616 ->(wq_completion)nfc3_nci_cmd_wq#329 ->(wq_completion)nfc3_nci_cmd_wq#330 ->(wq_completion)nfc2_nci_cmd_wq#617 ->(wq_completion)nfc4_nci_cmd_wq#104 ->(wq_completion)nfc2_nci_cmd_wq#618 ->(wq_completion)nfc3_nci_cmd_wq#331 ->(wq_completion)nfc2_nci_cmd_wq#619 ->(wq_completion)nfc3_nci_cmd_wq#332 ->(wq_completion)nfc2_nci_cmd_wq#620 ->(wq_completion)nfc2_nci_cmd_wq#621 ->(wq_completion)nfc4_nci_cmd_wq#105 ->(wq_completion)nfc3_nci_cmd_wq#333 ->(wq_completion)nfc2_nci_cmd_wq#622 ->(wq_completion)nfc2_nci_cmd_wq#623 ->(wq_completion)nfc3_nci_cmd_wq#334 ->(wq_completion)nfc2_nci_cmd_wq#624 ->(wq_completion)nfc3_nci_cmd_wq#335 ->(wq_completion)nfc2_nci_cmd_wq#625 ->(wq_completion)nfc3_nci_cmd_wq#336 ->(wq_completion)nfc3_nci_cmd_wq#337 ->(wq_completion)nfc4_nci_cmd_wq#106 ->(wq_completion)nfc2_nci_cmd_wq#626 ->(wq_completion)nfc3_nci_cmd_wq#338 ->(wq_completion)nfc2_nci_cmd_wq#627 ->(wq_completion)nfc3_nci_cmd_wq#339 ->(wq_completion)nfc2_nci_cmd_wq#628 ->(wq_completion)nfc3_nci_cmd_wq#340 ->(wq_completion)nfc2_nci_cmd_wq#629 ->(wq_completion)nfc3_nci_cmd_wq#341 ->(wq_completion)nfc2_nci_cmd_wq#630 ->(wq_completion)nfc3_nci_cmd_wq#342 ->(wq_completion)nfc2_nci_cmd_wq#631 ->(wq_completion)nfc2_nci_cmd_wq#632 ->(wq_completion)nfc3_nci_cmd_wq#343 ->(wq_completion)nfc4_nci_cmd_wq#107 ->(wq_completion)nfc2_nci_cmd_wq#633 ->(wq_completion)nfc2_nci_cmd_wq#634 ->(wq_completion)nfc2_nci_cmd_wq#635 ->(wq_completion)nfc3_nci_cmd_wq#344 ->(wq_completion)nfc4_nci_cmd_wq#108 ->(wq_completion)nfc2_nci_cmd_wq#636 ->(wq_completion)nfc2_nci_cmd_wq#637 
->(wq_completion)nfc2_nci_cmd_wq#638 ->(wq_completion)nfc3_nci_cmd_wq#345 ->(wq_completion)nfc2_nci_cmd_wq#639 ->(wq_completion)nfc4_nci_cmd_wq#109 ->(wq_completion)nfc3_nci_cmd_wq#346 ->(wq_completion)nfc2_nci_cmd_wq#640 ->(wq_completion)nfc3_nci_cmd_wq#347 ->(wq_completion)nfc2_nci_cmd_wq#641 ->(wq_completion)nfc3_nci_cmd_wq#348 ->(wq_completion)nfc2_nci_cmd_wq#642 ->(wq_completion)nfc3_nci_cmd_wq#349 ->(wq_completion)nfc2_nci_cmd_wq#643 ->(wq_completion)nfc2_nci_cmd_wq#644 ->(wq_completion)nfc3_nci_cmd_wq#350 ->(wq_completion)nfc2_nci_cmd_wq#645 ->(wq_completion)nfc2_nci_cmd_wq#646 ->(wq_completion)nfc3_nci_cmd_wq#351 ->(wq_completion)nfc2_nci_cmd_wq#647 ->(wq_completion)nfc3_nci_cmd_wq#352 ->(wq_completion)nfc2_nci_cmd_wq#648 ->(wq_completion)nfc3_nci_cmd_wq#353 ->(wq_completion)nfc4_nci_cmd_wq#110 ->(wq_completion)nfc5_nci_cmd_wq#31 ->(wq_completion)nfc2_nci_cmd_wq#649 ->(wq_completion)nfc2_nci_cmd_wq#650 ->(wq_completion)nfc3_nci_cmd_wq#354 ->(wq_completion)nfc2_nci_cmd_wq#651 ->(wq_completion)nfc4_nci_cmd_wq#111 ->(wq_completion)nfc2_nci_cmd_wq#652 ->(wq_completion)nfc2_nci_cmd_wq#653 ->(wq_completion)nfc3_nci_cmd_wq#355 ->(wq_completion)nfc2_nci_cmd_wq#654 ->(wq_completion)nfc3_nci_cmd_wq#356 ->(wq_completion)nfc2_nci_cmd_wq#655 ->(wq_completion)nfc2_nci_cmd_wq#656 ->(wq_completion)nfc3_nci_cmd_wq#357 ->(wq_completion)nfc4_nci_cmd_wq#112 ->(wq_completion)nfc2_nci_cmd_wq#657 ->(wq_completion)nfc3_nci_cmd_wq#358 ->(wq_completion)nfc2_nci_cmd_wq#658 ->(wq_completion)nfc3_nci_cmd_wq#359 ->(wq_completion)nfc2_nci_cmd_wq#659 ->(wq_completion)nfc2_nci_cmd_wq#660 ->(wq_completion)nfc3_nci_cmd_wq#360 ->(wq_completion)nfc4_nci_cmd_wq#113 ->(wq_completion)nfc2_nci_cmd_wq#661 ->(wq_completion)nfc3_nci_cmd_wq#361 ->(wq_completion)nfc2_nci_cmd_wq#662 ->(wq_completion)nfc3_nci_cmd_wq#362 ->(wq_completion)nfc2_nci_cmd_wq#663 ->(wq_completion)nfc3_nci_cmd_wq#363 ->(wq_completion)nfc2_nci_cmd_wq#664 ->(wq_completion)nfc3_nci_cmd_wq#364 ->(wq_completion)nfc2_nci_cmd_wq#665 ->(wq_completion)nfc2_nci_cmd_wq#666 ->(wq_completion)nfc3_nci_cmd_wq#365 ->(wq_completion)nfc2_nci_cmd_wq#667 ->(wq_completion)nfc3_nci_cmd_wq#366 ->(wq_completion)nfc2_nci_cmd_wq#668 ->(wq_completion)nfc3_nci_cmd_wq#367 ->(wq_completion)nfc2_nci_cmd_wq#669 ->(wq_completion)nfc3_nci_cmd_wq#368 ->(wq_completion)nfc2_nci_cmd_wq#670 ->(wq_completion)nfc2_nci_cmd_wq#671 ->(wq_completion)nfc3_nci_cmd_wq#369 ->(wq_completion)nfc2_nci_cmd_wq#672 ->(wq_completion)nfc3_nci_cmd_wq#370 ->(wq_completion)nfc4_nci_cmd_wq#114 ->(wq_completion)nfc2_nci_cmd_wq#673 ->(wq_completion)nfc3_nci_cmd_wq#371 ->(wq_completion)nfc2_nci_cmd_wq#674 ->(wq_completion)nfc3_nci_cmd_wq#372 ->(wq_completion)nfc2_nci_cmd_wq#675 ->(wq_completion)nfc3_nci_cmd_wq#373 ->(wq_completion)nfc2_nci_cmd_wq#676 ->(wq_completion)nfc2_nci_cmd_wq#677 ->(wq_completion)nfc3_nci_cmd_wq#374 ->(wq_completion)nfc2_nci_cmd_wq#678 ->(wq_completion)nfc3_nci_cmd_wq#375 ->(wq_completion)nfc2_nci_cmd_wq#679 ->(wq_completion)nfc3_nci_cmd_wq#376 ->(wq_completion)nfc4_nci_cmd_wq#115 ->(wq_completion)nfc2_nci_cmd_wq#680 ->(wq_completion)nfc2_nci_cmd_wq#681 ->(wq_completion)nfc3_nci_cmd_wq#377 ->(wq_completion)nfc2_nci_cmd_wq#682 ->(wq_completion)nfc3_nci_cmd_wq#378 ->(wq_completion)nfc2_nci_cmd_wq#683 ->(wq_completion)nfc3_nci_cmd_wq#379 ->(wq_completion)nfc2_nci_cmd_wq#684 ->(wq_completion)nfc3_nci_cmd_wq#380 ->(wq_completion)nfc2_nci_cmd_wq#685 ->(wq_completion)nfc3_nci_cmd_wq#381 ->(wq_completion)nfc2_nci_cmd_wq#686 ->(wq_completion)nfc2_nci_cmd_wq#687 ->(wq_completion)nfc3_nci_cmd_wq#382 
->(wq_completion)nfc2_nci_cmd_wq#688 ->(wq_completion)nfc3_nci_cmd_wq#383 ->(wq_completion)nfc2_nci_cmd_wq#689 ->(wq_completion)nfc3_nci_cmd_wq#384 ->(wq_completion)nfc2_nci_cmd_wq#690 ->(wq_completion)nfc3_nci_cmd_wq#385 ->(wq_completion)nfc2_nci_cmd_wq#691 ->(wq_completion)nfc2_nci_cmd_wq#692 ->(wq_completion)nfc3_nci_cmd_wq#386 ->(wq_completion)nfc2_nci_cmd_wq#693 ->(wq_completion)nfc3_nci_cmd_wq#387 ->(wq_completion)nfc2_nci_cmd_wq#694 ->(wq_completion)nfc2_nci_cmd_wq#695 ->(wq_completion)nfc3_nci_cmd_wq#388 ->(wq_completion)nfc2_nci_cmd_wq#696 ->(wq_completion)nfc4_nci_cmd_wq#116 ->(wq_completion)nfc3_nci_cmd_wq#389 ->(wq_completion)nfc2_nci_cmd_wq#697 ->(wq_completion)nfc2_nci_cmd_wq#698 ->(wq_completion)nfc3_nci_cmd_wq#390 ->(wq_completion)nfc2_nci_cmd_wq#699 ->(wq_completion)nfc4_nci_cmd_wq#117 ->(wq_completion)nfc2_nci_cmd_wq#700 ->(wq_completion)nfc3_nci_cmd_wq#391 ->(wq_completion)nfc2_nci_cmd_wq#701 ->(wq_completion)nfc4_nci_cmd_wq#118 ->(wq_completion)nfc2_nci_cmd_wq#702 ->(wq_completion)nfc2_nci_cmd_wq#703 ->(wq_completion)nfc3_nci_cmd_wq#392 ->(wq_completion)nfc2_nci_cmd_wq#704 ->(wq_completion)nfc4_nci_cmd_wq#119 ->(wq_completion)nfc3_nci_cmd_wq#393 ->(wq_completion)nfc2_nci_cmd_wq#705 ->(wq_completion)nfc3_nci_cmd_wq#394 ->(wq_completion)nfc3_nci_cmd_wq#395 ->(wq_completion)nfc2_nci_cmd_wq#706 ->(wq_completion)nfc2_nci_cmd_wq#707 ->(wq_completion)nfc3_nci_cmd_wq#396 ->(wq_completion)nfc2_nci_cmd_wq#708 ->(wq_completion)nfc3_nci_cmd_wq#397 ->(wq_completion)nfc2_nci_cmd_wq#709 ->(wq_completion)nfc2_nci_cmd_wq#710 ->(wq_completion)nfc3_nci_cmd_wq#398 ->(wq_completion)nfc2_nci_cmd_wq#711 ->(wq_completion)nfc2_nci_cmd_wq#712 ->(wq_completion)nfc3_nci_cmd_wq#399 ->(wq_completion)nfc2_nci_cmd_wq#713 ->(wq_completion)nfc3_nci_cmd_wq#400 ->(wq_completion)nfc2_nci_cmd_wq#714 ->(wq_completion)nfc2_nci_cmd_wq#715 ->(wq_completion)nfc3_nci_cmd_wq#401 ->(wq_completion)nfc2_nci_cmd_wq#716 ->(wq_completion)nfc3_nci_cmd_wq#402 ->(wq_completion)nfc2_nci_cmd_wq#717 ->(wq_completion)nfc4_nci_cmd_wq#120 ->(wq_completion)nfc2_nci_cmd_wq#718 ->(wq_completion)nfc2_nci_cmd_wq#719 ->(wq_completion)nfc3_nci_cmd_wq#403 ->(wq_completion)nfc4_nci_cmd_wq#121 ->(wq_completion)nfc2_nci_cmd_wq#720 ->(wq_completion)nfc3_nci_cmd_wq#404 ->(wq_completion)nfc2_nci_cmd_wq#721 ->(wq_completion)nfc2_nci_cmd_wq#722 ->(wq_completion)nfc3_nci_cmd_wq#405 ->(wq_completion)nfc4_nci_cmd_wq#122 ->(wq_completion)nfc2_nci_cmd_wq#723 ->(wq_completion)nfc2_nci_cmd_wq#724 ->(wq_completion)nfc2_nci_cmd_wq#725 ->(wq_completion)nfc3_nci_cmd_wq#406 ->(wq_completion)nfc2_nci_cmd_wq#726 ->(wq_completion)nfc2_nci_cmd_wq#727 ->(wq_completion)nfc3_nci_cmd_wq#407 ->(wq_completion)nfc2_nci_cmd_wq#728 ->(wq_completion)nfc3_nci_cmd_wq#408 ->(wq_completion)nfc2_nci_cmd_wq#729 ->(wq_completion)nfc3_nci_cmd_wq#409 ->(wq_completion)nfc2_nci_cmd_wq#730 ->(wq_completion)nfc3_nci_cmd_wq#410 ->(wq_completion)nfc2_nci_cmd_wq#731 ->(wq_completion)nfc2_nci_cmd_wq#732 ->(wq_completion)nfc3_nci_cmd_wq#411 ->(wq_completion)nfc2_nci_cmd_wq#733 ->(wq_completion)nfc3_nci_cmd_wq#412 ->(wq_completion)nfc2_nci_cmd_wq#734 ->(wq_completion)nfc3_nci_cmd_wq#413 ->(wq_completion)nfc2_nci_cmd_wq#735 ->(wq_completion)nfc3_nci_cmd_wq#414 ->(wq_completion)nfc4_nci_cmd_wq#123 ->(wq_completion)nfc2_nci_cmd_wq#736 ->(wq_completion)nfc2_nci_cmd_wq#737 ->(wq_completion)nfc3_nci_cmd_wq#415 ->(wq_completion)nfc2_nci_cmd_wq#738 ->(wq_completion)nfc4_nci_cmd_wq#124 ->(wq_completion)nfc2_nci_cmd_wq#739 ->(wq_completion)nfc2_nci_cmd_wq#740 ->(wq_completion)nfc3_nci_cmd_wq#416 
->(wq_completion)nfc4_nci_cmd_wq#125 ->(wq_completion)nfc2_nci_cmd_wq#741 ->(wq_completion)nfc2_nci_cmd_wq#742 ->(wq_completion)nfc4_nci_cmd_wq#126 ->(wq_completion)nfc5_nci_cmd_wq#32 ->(wq_completion)nfc3_nci_cmd_wq#417 ->(wq_completion)nfc2_nci_cmd_wq#743 ->(wq_completion)nfc3_nci_cmd_wq#418 ->(wq_completion)nfc4_nci_cmd_wq#127 ->(wq_completion)nfc5_nci_cmd_wq#33 ->(wq_completion)nfc2_nci_cmd_wq#744 ->(wq_completion)nfc3_nci_cmd_wq#419 ->(wq_completion)nfc4_nci_cmd_wq#128 ->(wq_completion)nfc2_nci_cmd_wq#745 ->(wq_completion)nfc5_nci_cmd_wq#34 ->(wq_completion)nfc2_nci_cmd_wq#746 ->(wq_completion)nfc3_nci_cmd_wq#420 ->(wq_completion)nfc2_nci_cmd_wq#747 ->(wq_completion)nfc3_nci_cmd_wq#421 ->(wq_completion)nfc2_nci_cmd_wq#748 ->(wq_completion)nfc2_nci_cmd_wq#749 ->(wq_completion)nfc2_nci_cmd_wq#750 ->(wq_completion)nfc3_nci_cmd_wq#422 ->(wq_completion)nfc2_nci_cmd_wq#751 ->(wq_completion)nfc3_nci_cmd_wq#423 ->(wq_completion)nfc2_nci_cmd_wq#752 ->(wq_completion)nfc3_nci_cmd_wq#424 ->(wq_completion)nfc2_nci_cmd_wq#753 ->(wq_completion)nfc3_nci_cmd_wq#425 ->(wq_completion)nfc2_nci_cmd_wq#754 ->(wq_completion)nfc3_nci_cmd_wq#426 ->(wq_completion)nfc2_nci_cmd_wq#755 ->(wq_completion)nfc4_nci_cmd_wq#129 ->(wq_completion)nfc3_nci_cmd_wq#427 ->(wq_completion)nfc2_nci_cmd_wq#756 ->(wq_completion)nfc2_nci_cmd_wq#757 ->(wq_completion)nfc3_nci_cmd_wq#428 ->(wq_completion)nfc2_nci_cmd_wq#758 ->(wq_completion)nfc3_nci_cmd_wq#429 ->(wq_completion)nfc4_nci_cmd_wq#130 ->(wq_completion)nfc2_nci_cmd_wq#759 ->(wq_completion)nfc3_nci_cmd_wq#430 ->(wq_completion)nfc4_nci_cmd_wq#131 ->(wq_completion)nfc2_nci_cmd_wq#760 ->(wq_completion)nfc3_nci_cmd_wq#431 ->(wq_completion)nfc2_nci_cmd_wq#761 ->(wq_completion)nfc3_nci_cmd_wq#432 ->(wq_completion)nfc2_nci_cmd_wq#762 ->(wq_completion)nfc4_nci_cmd_wq#132 ->(wq_completion)nfc2_nci_cmd_wq#763 ->(wq_completion)nfc3_nci_cmd_wq#433 ->(wq_completion)nfc2_nci_cmd_wq#764 ->(wq_completion)nfc3_nci_cmd_wq#434 ->(wq_completion)nfc2_nci_cmd_wq#765 ->(wq_completion)nfc3_nci_cmd_wq#435 ->(wq_completion)nfc2_nci_cmd_wq#766 ->(wq_completion)nfc2_nci_cmd_wq#767 ->(wq_completion)nfc3_nci_cmd_wq#436 ->(wq_completion)nfc2_nci_cmd_wq#768 ->(wq_completion)nfc3_nci_cmd_wq#437 ->(wq_completion)nfc5_nci_cmd_wq#35 ->(wq_completion)nfc4_nci_cmd_wq#133 ->(wq_completion)nfc2_nci_cmd_wq#769 ->(wq_completion)nfc2_nci_cmd_wq#770 ->(wq_completion)nfc3_nci_cmd_wq#438 ->(wq_completion)nfc2_nci_cmd_wq#771 ->(wq_completion)nfc3_nci_cmd_wq#439 ->(wq_completion)nfc2_nci_cmd_wq#772 ->(wq_completion)nfc3_nci_cmd_wq#440 ->(wq_completion)nfc4_nci_cmd_wq#134 ->(wq_completion)nfc2_nci_cmd_wq#773 ->(wq_completion)nfc2_nci_cmd_wq#774 ->(wq_completion)nfc3_nci_cmd_wq#441 ->(wq_completion)nfc4_nci_cmd_wq#135 ->(wq_completion)nfc2_nci_cmd_wq#775 ->(wq_completion)nfc3_nci_cmd_wq#442 ->(wq_completion)nfc2_nci_cmd_wq#776 ->(wq_completion)nfc2_nci_cmd_wq#777 ->(wq_completion)nfc2_nci_cmd_wq#778 ->(wq_completion)nfc3_nci_cmd_wq#443 ->(wq_completion)nfc4_nci_cmd_wq#136 ->(wq_completion)nfc2_nci_cmd_wq#779 ->(wq_completion)nfc3_nci_cmd_wq#444 ->(wq_completion)nfc2_nci_cmd_wq#780 ->(wq_completion)nfc3_nci_cmd_wq#445 ->(wq_completion)nfc2_nci_cmd_wq#781 ->(wq_completion)nfc2_nci_cmd_wq#782 ->(wq_completion)nfc3_nci_cmd_wq#446 ->(wq_completion)nfc2_nci_cmd_wq#783 ->(wq_completion)nfc2_nci_cmd_wq#784 ->(wq_completion)nfc2_nci_cmd_wq#785 ->(wq_completion)nfc2_nci_cmd_wq#786 ->(wq_completion)nfc3_nci_cmd_wq#447 ->(wq_completion)nfc2_nci_cmd_wq#787 ->(wq_completion)nfc2_nci_cmd_wq#788 ->(wq_completion)nfc2_nci_cmd_wq#789 
->(wq_completion)nfc3_nci_cmd_wq#448 ->(wq_completion)nfc2_nci_cmd_wq#790 ->(wq_completion)nfc3_nci_cmd_wq#449 ->(wq_completion)nfc2_nci_cmd_wq#791 ->(wq_completion)nfc3_nci_cmd_wq#450 ->(wq_completion)nfc2_nci_cmd_wq#792 ->(wq_completion)nfc2_nci_cmd_wq#793 ->(wq_completion)nfc3_nci_cmd_wq#451 ->(wq_completion)nfc4_nci_cmd_wq#137 ->(wq_completion)nfc2_nci_cmd_wq#794 ->(wq_completion)nfc2_nci_cmd_wq#795 ->(wq_completion)nfc3_nci_cmd_wq#452 ->(wq_completion)nfc2_nci_cmd_wq#796 ->(wq_completion)nfc3_nci_cmd_wq#453 ->(wq_completion)nfc2_nci_cmd_wq#797 ->(wq_completion)nfc3_nci_cmd_wq#454 ->(wq_completion)nfc4_nci_cmd_wq#138 ->(wq_completion)nfc2_nci_cmd_wq#798 ->(wq_completion)nfc3_nci_cmd_wq#455 ->(wq_completion)nfc2_nci_cmd_wq#799 ->(wq_completion)nfc4_nci_cmd_wq#139 ->(wq_completion)nfc2_nci_cmd_wq#800 ->(wq_completion)nfc2_nci_cmd_wq#801 ->(wq_completion)nfc3_nci_cmd_wq#456 ->(wq_completion)nfc2_nci_cmd_wq#802 ->(wq_completion)nfc3_nci_cmd_wq#457 ->(wq_completion)nfc4_nci_cmd_wq#140 ->(wq_completion)nfc2_nci_cmd_wq#803 ->(wq_completion)nfc3_nci_cmd_wq#458 ->(wq_completion)nfc4_nci_cmd_wq#141 ->(wq_completion)nfc2_nci_cmd_wq#804 ->(wq_completion)nfc2_nci_cmd_wq#805 ->(wq_completion)nfc2_nci_cmd_wq#806 ->(wq_completion)nfc2_nci_cmd_wq#807 ->(wq_completion)nfc4_nci_cmd_wq#142 ->(wq_completion)nfc3_nci_cmd_wq#459 ->(wq_completion)nfc2_nci_cmd_wq#808 ->(wq_completion)nfc3_nci_cmd_wq#460 ->(wq_completion)nfc2_nci_cmd_wq#809 ->(wq_completion)nfc4_nci_cmd_wq#143 ->(wq_completion)nfc5_nci_cmd_wq#36 ->(wq_completion)nfc2_nci_cmd_wq#810 ->(wq_completion)nfc3_nci_cmd_wq#461 ->(wq_completion)nfc2_nci_cmd_wq#811 ->(wq_completion)nfc4_nci_cmd_wq#144 ->(wq_completion)nfc2_nci_cmd_wq#812 ->(wq_completion)nfc2_nci_cmd_wq#813 ->(wq_completion)nfc3_nci_cmd_wq#462 ->(wq_completion)nfc3_nci_cmd_wq#463 ->(wq_completion)nfc2_nci_cmd_wq#814 ->(wq_completion)nfc3_nci_cmd_wq#464 ->(wq_completion)nfc2_nci_cmd_wq#815 ->(wq_completion)nfc4_nci_cmd_wq#145 ->(wq_completion)nfc2_nci_cmd_wq#816 ->(wq_completion)nfc2_nci_cmd_wq#817 ->(wq_completion)nfc3_nci_cmd_wq#465 ->(wq_completion)nfc4_nci_cmd_wq#146 ->(wq_completion)nfc2_nci_cmd_wq#818 ->(wq_completion)nfc3_nci_cmd_wq#466 ->(wq_completion)nfc2_nci_cmd_wq#819 ->(wq_completion)nfc3_nci_cmd_wq#467 ->(wq_completion)nfc4_nci_cmd_wq#147 ->(wq_completion)nfc2_nci_cmd_wq#820 ->(wq_completion)nfc3_nci_cmd_wq#468 ->(wq_completion)nfc2_nci_cmd_wq#821 ->(wq_completion)nfc3_nci_cmd_wq#469 ->(wq_completion)nfc2_nci_cmd_wq#822 ->(wq_completion)nfc4_nci_cmd_wq#148 ->(wq_completion)nfc2_nci_cmd_wq#823 ->(wq_completion)nfc3_nci_cmd_wq#470 ->(wq_completion)nfc2_nci_cmd_wq#824 ->(wq_completion)nfc3_nci_cmd_wq#471 ->(wq_completion)nfc2_nci_cmd_wq#825 ->(wq_completion)nfc4_nci_cmd_wq#149 ->(wq_completion)nfc2_nci_cmd_wq#826 ->(wq_completion)nfc3_nci_cmd_wq#472 ->(wq_completion)nfc2_nci_cmd_wq#827 ->(wq_completion)nfc3_nci_cmd_wq#473 ->(wq_completion)nfc2_nci_cmd_wq#828 ->(wq_completion)nfc2_nci_cmd_wq#829 ->(wq_completion)nfc3_nci_cmd_wq#474 ->(wq_completion)nfc4_nci_cmd_wq#150 ->(wq_completion)nfc2_nci_cmd_wq#830 ->(wq_completion)nfc3_nci_cmd_wq#475 ->(wq_completion)nfc2_nci_cmd_wq#831 ->(wq_completion)nfc4_nci_cmd_wq#151 ->(wq_completion)nfc2_nci_cmd_wq#832 ->(wq_completion)nfc3_nci_cmd_wq#476 ->(wq_completion)nfc2_nci_cmd_wq#833 ->(wq_completion)nfc3_nci_cmd_wq#477 ->(wq_completion)nfc2_nci_cmd_wq#834 ->(wq_completion)nfc3_nci_cmd_wq#478 ->(wq_completion)nfc2_nci_cmd_wq#835 ->(wq_completion)nfc3_nci_cmd_wq#479 ->(wq_completion)nfc2_nci_cmd_wq#836 ->(wq_completion)nfc3_nci_cmd_wq#480 
->(wq_completion)nfc2_nci_cmd_wq#837 ->(wq_completion)nfc3_nci_cmd_wq#481 ->(wq_completion)nfc2_nci_cmd_wq#838 ->(wq_completion)nfc2_nci_cmd_wq#839 ->(wq_completion)nfc4_nci_cmd_wq#152 ->(wq_completion)nfc3_nci_cmd_wq#482 ->(wq_completion)nfc2_nci_cmd_wq#840 ->(wq_completion)nfc3_nci_cmd_wq#483 ->(wq_completion)nfc4_nci_cmd_wq#153 ->(wq_completion)nfc2_nci_cmd_wq#841 ->(wq_completion)nfc2_nci_cmd_wq#842 ->(wq_completion)nfc3_nci_cmd_wq#484 ->(wq_completion)nfc4_nci_cmd_wq#154 ->(wq_completion)nfc3_nci_cmd_wq#485 ->(wq_completion)nfc2_nci_cmd_wq#843 ->(wq_completion)nfc3_nci_cmd_wq#486 ->(wq_completion)nfc4_nci_cmd_wq#155 ->(wq_completion)nfc2_nci_cmd_wq#844 ->(wq_completion)nfc3_nci_cmd_wq#487 ->(wq_completion)nfc2_nci_cmd_wq#845 ->(wq_completion)nfc3_nci_cmd_wq#488 ->(wq_completion)nfc2_nci_cmd_wq#846 ->(wq_completion)nfc2_nci_cmd_wq#847 ->(wq_completion)nfc3_nci_cmd_wq#489 ->(wq_completion)nfc4_nci_cmd_wq#156 ->(wq_completion)nfc2_nci_cmd_wq#848 ->(wq_completion)nfc3_nci_cmd_wq#490 ->(wq_completion)nfc2_nci_cmd_wq#849 ->(wq_completion)nfc3_nci_cmd_wq#491 ->(wq_completion)nfc2_nci_cmd_wq#850 ->(wq_completion)nfc3_nci_cmd_wq#492 ->(wq_completion)nfc2_nci_cmd_wq#851 ->(wq_completion)nfc3_nci_cmd_wq#493 ->(wq_completion)nfc2_nci_cmd_wq#852 ->(wq_completion)nfc3_nci_cmd_wq#494 ->(wq_completion)nfc2_nci_cmd_wq#853 ->(wq_completion)nfc3_nci_cmd_wq#495 ->(wq_completion)nfc2_nci_cmd_wq#854 ->(wq_completion)nfc3_nci_cmd_wq#496 ->(wq_completion)nfc2_nci_cmd_wq#855 ->(wq_completion)nfc3_nci_cmd_wq#497 ->(wq_completion)nfc2_nci_cmd_wq#856 ->(wq_completion)nfc4_nci_cmd_wq#157 ->(wq_completion)nfc2_nci_cmd_wq#857 ->(wq_completion)nfc3_nci_cmd_wq#498 ->(wq_completion)nfc2_nci_cmd_wq#858 ->(wq_completion)nfc3_nci_cmd_wq#499 ->(wq_completion)nfc2_nci_cmd_wq#859 ->(wq_completion)nfc3_nci_cmd_wq#500 ->(wq_completion)nfc2_nci_cmd_wq#860 ->(wq_completion)nfc3_nci_cmd_wq#501 ->(wq_completion)nfc2_nci_cmd_wq#861 ->(wq_completion)nfc3_nci_cmd_wq#502 ->(wq_completion)nfc4_nci_cmd_wq#158 ->(wq_completion)nfc2_nci_cmd_wq#862 ->(wq_completion)nfc2_nci_cmd_wq#863 ->(wq_completion)nfc3_nci_cmd_wq#503 ->(wq_completion)nfc2_nci_cmd_wq#864 ->(wq_completion)nfc3_nci_cmd_wq#504 ->(wq_completion)nfc2_nci_cmd_wq#865 ->(wq_completion)nfc2_nci_cmd_wq#866 ->(wq_completion)nfc2_nci_cmd_wq#867 ->(wq_completion)nfc2_nci_cmd_wq#868 ->(wq_completion)nfc4_nci_cmd_wq#159 ->(wq_completion)nfc2_nci_cmd_wq#869 ->(wq_completion)nfc3_nci_cmd_wq#505 ->(wq_completion)nfc2_nci_cmd_wq#870 ->(wq_completion)nfc2_nci_cmd_wq#871 ->(wq_completion)nfc3_nci_cmd_wq#506 ->(wq_completion)nfc2_nci_cmd_wq#872 ->(wq_completion)nfc3_nci_cmd_wq#507 ->(wq_completion)nfc2_nci_cmd_wq#873 ->(wq_completion)nfc2_nci_cmd_wq#874 ->(wq_completion)nfc3_nci_cmd_wq#508 ->(wq_completion)nfc2_nci_cmd_wq#875 ->(wq_completion)nfc3_nci_cmd_wq#509 ->(wq_completion)nfc4_nci_cmd_wq#160 ->(wq_completion)nfc2_nci_cmd_wq#876 ->(wq_completion)nfc3_nci_cmd_wq#510 ->(wq_completion)nfc4_nci_cmd_wq#161 ->(wq_completion)nfc2_nci_cmd_wq#877 ->(wq_completion)nfc2_nci_cmd_wq#878 ->(wq_completion)nfc3_nci_cmd_wq#511 ->(wq_completion)nfc2_nci_cmd_wq#879 ->(wq_completion)nfc3_nci_cmd_wq#512 ->(wq_completion)nfc2_nci_cmd_wq#880 ->(wq_completion)nfc3_nci_cmd_wq#513 ->(wq_completion)nfc2_nci_cmd_wq#881 ->(wq_completion)nfc3_nci_cmd_wq#514 ->(wq_completion)nfc2_nci_cmd_wq#882 ->(wq_completion)nfc3_nci_cmd_wq#515 ->(wq_completion)nfc4_nci_cmd_wq#162 ->(wq_completion)nfc2_nci_cmd_wq#883 ->(wq_completion)nfc2_nci_cmd_wq#884 ->(wq_completion)nfc3_nci_cmd_wq#516 ->(wq_completion)nfc2_nci_cmd_wq#885 
->(wq_completion)nfc3_nci_cmd_wq#517 ->(wq_completion)nfc2_nci_cmd_wq#886 ->(wq_completion)nfc4_nci_cmd_wq#163 ->(wq_completion)nfc2_nci_cmd_wq#887 ->(wq_completion)nfc2_nci_cmd_wq#888 ->(wq_completion)nfc3_nci_cmd_wq#518 ->(wq_completion)nfc2_nci_cmd_wq#889 ->(wq_completion)nfc3_nci_cmd_wq#519 ->(wq_completion)nfc4_nci_cmd_wq#164 ->(wq_completion)nfc2_nci_cmd_wq#890 ->(wq_completion)nfc2_nci_cmd_wq#891 ->(wq_completion)nfc2_nci_cmd_wq#892 ->(wq_completion)nfc3_nci_cmd_wq#520 ->(wq_completion)nfc2_nci_cmd_wq#893 ->(wq_completion)nfc3_nci_cmd_wq#521 ->(wq_completion)nfc2_nci_cmd_wq#894 ->(wq_completion)nfc2_nci_cmd_wq#895 ->(wq_completion)nfc3_nci_cmd_wq#522 ->(wq_completion)nfc4_nci_cmd_wq#165 ->(wq_completion)nfc3_nci_cmd_wq#523 ->(wq_completion)nfc2_nci_cmd_wq#896 ->(wq_completion)nfc2_nci_cmd_wq#897 ->(wq_completion)nfc3_nci_cmd_wq#524 ->(wq_completion)nfc4_nci_cmd_wq#166 ->(wq_completion)nfc3_nci_cmd_wq#525 ->(wq_completion)nfc2_nci_cmd_wq#898 ->(wq_completion)nfc2_nci_cmd_wq#899 ->(wq_completion)nfc3_nci_cmd_wq#526 ->(wq_completion)nfc2_nci_cmd_wq#900 ->(wq_completion)nfc3_nci_cmd_wq#527 ->(wq_completion)nfc4_nci_cmd_wq#167 ->(wq_completion)nfc2_nci_cmd_wq#901 ->(wq_completion)nfc3_nci_cmd_wq#528 ->(wq_completion)nfc2_nci_cmd_wq#902 ->(wq_completion)nfc3_nci_cmd_wq#529 ->(wq_completion)nfc2_nci_cmd_wq#903 ->(wq_completion)nfc2_nci_cmd_wq#904 ->(wq_completion)nfc2_nci_cmd_wq#905 ->(wq_completion)nfc3_nci_cmd_wq#530 ->(wq_completion)nfc2_nci_cmd_wq#906 ->(wq_completion)nfc3_nci_cmd_wq#531 ->(wq_completion)nfc2_nci_cmd_wq#907 ->(wq_completion)nfc2_nci_cmd_wq#908 ->(wq_completion)nfc2_nci_cmd_wq#909 ->(wq_completion)nfc3_nci_cmd_wq#532 ->(wq_completion)nfc4_nci_cmd_wq#168 ->(wq_completion)nfc2_nci_cmd_wq#910 ->(wq_completion)nfc2_nci_cmd_wq#911 ->(wq_completion)nfc3_nci_cmd_wq#533 ->(wq_completion)nfc2_nci_cmd_wq#912 ->(wq_completion)nfc4_nci_cmd_wq#169 ->(wq_completion)nfc2_nci_cmd_wq#913 ->(wq_completion)nfc2_nci_cmd_wq#914 ->(wq_completion)nfc2_nci_cmd_wq#915 ->(wq_completion)nfc3_nci_cmd_wq#534 ->(wq_completion)nfc4_nci_cmd_wq#170 ->(wq_completion)nfc2_nci_cmd_wq#916 ->(wq_completion)nfc3_nci_cmd_wq#535 ->(wq_completion)nfc2_nci_cmd_wq#917 ->(wq_completion)nfc3_nci_cmd_wq#536 ->(wq_completion)nfc2_nci_cmd_wq#918 ->(wq_completion)nfc2_nci_cmd_wq#919 ->(wq_completion)nfc3_nci_cmd_wq#537 ->(wq_completion)nfc2_nci_cmd_wq#920 ->(wq_completion)nfc2_nci_cmd_wq#921 ->(wq_completion)nfc3_nci_cmd_wq#538 ->(wq_completion)nfc2_nci_cmd_wq#922 ->(wq_completion)nfc2_nci_cmd_wq#923 ->(wq_completion)nfc2_nci_cmd_wq#924 ->(wq_completion)nfc3_nci_cmd_wq#539 ->(wq_completion)nfc4_nci_cmd_wq#171 ->(wq_completion)nfc2_nci_cmd_wq#925 ->(wq_completion)nfc3_nci_cmd_wq#540 ->(wq_completion)nfc2_nci_cmd_wq#926 ->(wq_completion)nfc2_nci_cmd_wq#927 ->(wq_completion)nfc3_nci_cmd_wq#541 ->(wq_completion)nfc2_nci_cmd_wq#928 ->(wq_completion)nfc3_nci_cmd_wq#542 ->(wq_completion)nfc2_nci_cmd_wq#929 ->(wq_completion)nfc2_nci_cmd_wq#930 ->(wq_completion)nfc2_nci_cmd_wq#931 ->(wq_completion)nfc3_nci_cmd_wq#543 ->(wq_completion)nfc2_nci_cmd_wq#932 ->(wq_completion)nfc2_nci_cmd_wq#933 ->(wq_completion)nfc2_nci_cmd_wq#934 ->(wq_completion)nfc7_nci_cmd_wq#5 ->(wq_completion)nfc8_nci_cmd_wq#5 ->(wq_completion)nfc11_nci_cmd_wq#5 ->(wq_completion)nfc12_nci_cmd_wq#5 ->(wq_completion)nfc12_nci_cmd_wq#6 ->(wq_completion)nfc14_nci_cmd_wq#5 ->(wq_completion)nfc21_nci_cmd_wq#7 ->(wq_completion)nfc22_nci_cmd_wq#5 ->(wq_completion)nfc24_nci_cmd_wq#4 ->(wq_completion)nfc28_nci_cmd_wq#4 ->(wq_completion)nfc28_nci_cmd_wq#5 
->(wq_completion)nfc30_nci_cmd_wq#4 ->(wq_completion)nfc33_nci_cmd_wq#4 ->(wq_completion)nfc34_nci_cmd_wq#4 ->(wq_completion)nfc34_nci_cmd_wq#5 ->(wq_completion)nfc34_nci_cmd_wq#6 ->(wq_completion)nfc3_nci_cmd_wq#544 ->(wq_completion)nfc2_nci_cmd_wq#935 ->(wq_completion)nfc4_nci_cmd_wq#172 ->(wq_completion)nfc5_nci_cmd_wq#37 ->(wq_completion)nfc6_nci_cmd_wq#7 ->(wq_completion)nfc9_nci_cmd_wq#6 ->(wq_completion)nfc34_nci_cmd_wq#7 ->(wq_completion)nfc7_nci_cmd_wq#6 ->(wq_completion)nfc8_nci_cmd_wq#6 ->(wq_completion)nfc4_nci_cmd_wq#173 ->(wq_completion)nfc10_nci_cmd_wq#5 ->(wq_completion)nfc13_nci_cmd_wq#4 ->(wq_completion)nfc11_nci_cmd_wq#6 ->(wq_completion)nfc15_nci_cmd_wq#5 ->(wq_completion)nfc7_nci_cmd_wq#7 ->(wq_completion)nfc12_nci_cmd_wq#7 ->(wq_completion)nfc9_nci_cmd_wq#7 ->(wq_completion)nfc16_nci_cmd_wq#6 ->(wq_completion)nfc17_nci_cmd_wq#8 ->(wq_completion)nfc18_nci_cmd_wq#6 ->(wq_completion)nfc14_nci_cmd_wq#6 ->(wq_completion)nfc19_nci_cmd_wq#4 ->(wq_completion)nfc20_nci_cmd_wq#4 ->(wq_completion)nfc23_nci_cmd_wq#6 ->(wq_completion)nfc21_nci_cmd_wq#8 ->(wq_completion)nfc22_nci_cmd_wq#6 ->(wq_completion)nfc25_nci_cmd_wq#7 ->(wq_completion)nfc26_nci_cmd_wq#6 ->(wq_completion)nfc24_nci_cmd_wq#5 ->(wq_completion)nfc27_nci_cmd_wq#5 ->(wq_completion)nfc2_nci_cmd_wq#936 ->(wq_completion)nfc3_nci_cmd_wq#545 ->(wq_completion)nfc5_nci_cmd_wq#38 ->(wq_completion)nfc4_nci_cmd_wq#174 ->(wq_completion)nfc6_nci_cmd_wq#8 ->(wq_completion)nfc8_nci_cmd_wq#7 ->(wq_completion)nfc10_nci_cmd_wq#6 ->(wq_completion)nfc7_nci_cmd_wq#8 ->(wq_completion)nfc9_nci_cmd_wq#8 ->(wq_completion)nfc11_nci_cmd_wq#7 ->(wq_completion)nfc12_nci_cmd_wq#8 ->(wq_completion)nfc13_nci_cmd_wq#5 ->(wq_completion)nfc14_nci_cmd_wq#7 ->(wq_completion)nfc15_nci_cmd_wq#6 ->(wq_completion)nfc16_nci_cmd_wq#7 ->(wq_completion)nfc17_nci_cmd_wq#9 ->(wq_completion)nfc18_nci_cmd_wq#7 ->(wq_completion)nfc41_nci_cmd_wq#2 ->(wq_completion)nfc40_nci_cmd_wq#2 ->(wq_completion)nfc39_nci_cmd_wq#2 ->(wq_completion)nfc38_nci_cmd_wq#2 ->(wq_completion)nfc37_nci_cmd_wq#2 ->(wq_completion)nfc36_nci_cmd_wq#2 ->(wq_completion)nfc35_nci_cmd_wq#2 ->(wq_completion)nfc33_nci_cmd_wq#5 ->(wq_completion)nfc30_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#6 ->(wq_completion)nfc32_nci_cmd_wq#4 ->(wq_completion)nfc31_nci_cmd_wq#6 ->(wq_completion)nfc29_nci_cmd_wq#4 ->(wq_completion)nfc7_nci_cmd_wq#9 ->(wq_completion)nfc10_nci_cmd_wq#7 ->(wq_completion)nfc8_nci_cmd_wq#8 ->(wq_completion)nfc6_nci_cmd_wq#9 ->(wq_completion)nfc5_nci_cmd_wq#39 ->(wq_completion)nfc4_nci_cmd_wq#175 ->(wq_completion)nfc3_nci_cmd_wq#546 ->(wq_completion)nfc2_nci_cmd_wq#937 ->(wq_completion)nfc19_nci_cmd_wq#5 ->(wq_completion)nfc2_nci_cmd_wq#938 ->(wq_completion)nfc4_nci_cmd_wq#176 ->(wq_completion)nfc3_nci_cmd_wq#547 ->(wq_completion)nfc2_nci_cmd_wq#939 ->(wq_completion)nfc4_nci_cmd_wq#177 ->(wq_completion)nfc2_nci_cmd_wq#940 ->(wq_completion)nfc4_nci_cmd_wq#178 ->(wq_completion)nfc3_nci_cmd_wq#548 ->(wq_completion)nfc2_nci_cmd_wq#941 ->(wq_completion)nfc2_nci_cmd_wq#942 ->(wq_completion)nfc3_nci_cmd_wq#549 ->(wq_completion)nfc2_nci_cmd_wq#943 ->(wq_completion)nfc3_nci_cmd_wq#550 ->(wq_completion)nfc4_nci_cmd_wq#179 ->(wq_completion)nfc2_nci_cmd_wq#944 ->(wq_completion)nfc3_nci_cmd_wq#551 ->(wq_completion)nfc2_nci_cmd_wq#945 ->(wq_completion)nfc3_nci_cmd_wq#552 ->(wq_completion)nfc3_nci_cmd_wq#553 ->(wq_completion)nfc3_nci_cmd_wq#554 ->(wq_completion)nfc2_nci_cmd_wq#946 ->(wq_completion)nfc4_nci_cmd_wq#180 ->(wq_completion)nfc5_nci_cmd_wq#40 ->(wq_completion)nfc2_nci_cmd_wq#947 
->(wq_completion)nfc3_nci_cmd_wq#555 ->(wq_completion)nfc2_nci_cmd_wq#948 ->(wq_completion)nfc3_nci_cmd_wq#556 ->(wq_completion)nfc2_nci_cmd_wq#949 ->(wq_completion)nfc3_nci_cmd_wq#557 ->(wq_completion)nfc4_nci_cmd_wq#181 ->(wq_completion)nfc2_nci_cmd_wq#950 ->(wq_completion)nfc2_nci_cmd_wq#951 ->(wq_completion)nfc3_nci_cmd_wq#558 ->(wq_completion)nfc2_nci_cmd_wq#952 ->(wq_completion)nfc3_nci_cmd_wq#559 ->(wq_completion)nfc4_nci_cmd_wq#182 ->(wq_completion)nfc2_nci_cmd_wq#953 ->(wq_completion)nfc3_nci_cmd_wq#560 ->(wq_completion)nfc2_nci_cmd_wq#954 ->(wq_completion)nfc2_nci_cmd_wq#955 ->(wq_completion)nfc3_nci_cmd_wq#561 ->(wq_completion)nfc4_nci_cmd_wq#183 ->(wq_completion)nfc2_nci_cmd_wq#956 ->(wq_completion)nfc2_nci_cmd_wq#957 ->(wq_completion)nfc3_nci_cmd_wq#562 ->(wq_completion)nfc2_nci_cmd_wq#958 ->(wq_completion)nfc3_nci_cmd_wq#563 ->(wq_completion)nfc4_nci_cmd_wq#184 ->(wq_completion)nfc3_nci_cmd_wq#564 ->(wq_completion)nfc2_nci_cmd_wq#959 ->(wq_completion)nfc4_nci_cmd_wq#185 ->(wq_completion)nfc5_nci_cmd_wq#41 ->(wq_completion)nfc2_nci_cmd_wq#960 ->(wq_completion)nfc2_nci_cmd_wq#961 ->(wq_completion)nfc3_nci_cmd_wq#565 ->(wq_completion)nfc4_nci_cmd_wq#186 ->(wq_completion)nfc2_nci_cmd_wq#962 ->(wq_completion)nfc2_nci_cmd_wq#963 ->(wq_completion)nfc3_nci_cmd_wq#566 ->(wq_completion)nfc2_nci_cmd_wq#964 ->(wq_completion)nfc4_nci_cmd_wq#187 ->(wq_completion)nfc5_nci_cmd_wq#42 ->(wq_completion)nfc3_nci_cmd_wq#567 ->(wq_completion)nfc3_nci_cmd_wq#568 ->(wq_completion)nfc2_nci_cmd_wq#965 ->(wq_completion)nfc2_nci_cmd_wq#966 ->(wq_completion)nfc3_nci_cmd_wq#569 ->(wq_completion)nfc2_nci_cmd_wq#967 ->(wq_completion)nfc4_nci_cmd_wq#188 ->(wq_completion)nfc2_nci_cmd_wq#968 ->(wq_completion)nfc3_nci_cmd_wq#570 ->(wq_completion)nfc2_nci_cmd_wq#969 ->(wq_completion)nfc3_nci_cmd_wq#571 ->(wq_completion)nfc2_nci_cmd_wq#970 ->(wq_completion)nfc2_nci_cmd_wq#971 ->(wq_completion)nfc3_nci_cmd_wq#572 ->(wq_completion)nfc4_nci_cmd_wq#189 ->(wq_completion)nfc2_nci_cmd_wq#972 ->(wq_completion)nfc2_nci_cmd_wq#973 ->(wq_completion)nfc2_nci_cmd_wq#974 ->(wq_completion)nfc5_nci_cmd_wq#43 ->(wq_completion)nfc3_nci_cmd_wq#573 ->(wq_completion)nfc4_nci_cmd_wq#190 ->(wq_completion)nfc2_nci_cmd_wq#975 ->(wq_completion)nfc5_nci_cmd_wq#44 ->(wq_completion)nfc2_nci_cmd_wq#976 ->(wq_completion)nfc3_nci_cmd_wq#574 ->(wq_completion)nfc2_nci_cmd_wq#977 ->(wq_completion)nfc3_nci_cmd_wq#575 ->(wq_completion)nfc2_nci_cmd_wq#978 ->(wq_completion)nfc3_nci_cmd_wq#576 ->(wq_completion)nfc2_nci_cmd_wq#979 ->(wq_completion)nfc4_nci_cmd_wq#191 ->(wq_completion)nfc2_nci_cmd_wq#980 ->(wq_completion)nfc3_nci_cmd_wq#577 ->(wq_completion)nfc2_nci_cmd_wq#981 ->(wq_completion)nfc3_nci_cmd_wq#578 ->(wq_completion)nfc2_nci_cmd_wq#982 ->(wq_completion)nfc3_nci_cmd_wq#579 ->(wq_completion)nfc2_nci_cmd_wq#983 ->(wq_completion)nfc3_nci_cmd_wq#580 ->(wq_completion)nfc4_nci_cmd_wq#192 ->(wq_completion)nfc2_nci_cmd_wq#984 ->(wq_completion)nfc3_nci_cmd_wq#581 ->(wq_completion)nfc2_nci_cmd_wq#985 ->(wq_completion)nfc6_nci_cmd_wq#10 ->(wq_completion)nfc4_nci_cmd_wq#193 ->(wq_completion)nfc8_nci_cmd_wq#9 ->(wq_completion)nfc3_nci_cmd_wq#582 ->(wq_completion)nfc2_nci_cmd_wq#986 ->(wq_completion)nfc5_nci_cmd_wq#45 ->(wq_completion)nfc7_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#987 ->(wq_completion)nfc3_nci_cmd_wq#583 ->(wq_completion)nfc2_nci_cmd_wq#988 ->(wq_completion)nfc4_nci_cmd_wq#194 ->(wq_completion)nfc2_nci_cmd_wq#989 ->(wq_completion)nfc3_nci_cmd_wq#584 ->(wq_completion)nfc4_nci_cmd_wq#195 ->(wq_completion)nfc6_nci_cmd_wq#11 
->(wq_completion)nfc5_nci_cmd_wq#46 ->(wq_completion)nfc2_nci_cmd_wq#990 ->(wq_completion)nfc3_nci_cmd_wq#585 ->(wq_completion)nfc2_nci_cmd_wq#991 ->(wq_completion)nfc2_nci_cmd_wq#992 ->(wq_completion)nfc3_nci_cmd_wq#586 ->(wq_completion)nfc2_nci_cmd_wq#993 ->(wq_completion)nfc4_nci_cmd_wq#196 ->(wq_completion)nfc2_nci_cmd_wq#994 ->(wq_completion)nfc2_nci_cmd_wq#995 ->(wq_completion)nfc3_nci_cmd_wq#587 ->(wq_completion)nfc2_nci_cmd_wq#996 ->(wq_completion)nfc3_nci_cmd_wq#588 ->(wq_completion)nfc4_nci_cmd_wq#197 ->(wq_completion)nfc2_nci_cmd_wq#997 ->(wq_completion)nfc2_nci_cmd_wq#998 ->(wq_completion)nfc3_nci_cmd_wq#589 ->(wq_completion)nfc4_nci_cmd_wq#198 ->(wq_completion)nfc2_nci_cmd_wq#999 ->(wq_completion)nfc3_nci_cmd_wq#590 ->(wq_completion)nfc3_nci_cmd_wq#591 ->(wq_completion)nfc2_nci_cmd_wq#1000 ->(wq_completion)nfc2_nci_cmd_wq#1001 ->(wq_completion)nfc3_nci_cmd_wq#592 ->(wq_completion)nfc2_nci_cmd_wq#1002 ->(wq_completion)nfc3_nci_cmd_wq#593 ->(wq_completion)nfc2_nci_cmd_wq#1003 ->(wq_completion)nfc3_nci_cmd_wq#594 ->(wq_completion)nfc2_nci_cmd_wq#1004 ->(wq_completion)nfc4_nci_cmd_wq#199 ->(wq_completion)nfc2_nci_cmd_wq#1005 ->(wq_completion)nfc2_nci_cmd_wq#1006 ->(wq_completion)nfc3_nci_cmd_wq#595 ->(wq_completion)nfc2_nci_cmd_wq#1007 ->(wq_completion)nfc3_nci_cmd_wq#596 ->(wq_completion)nfc4_nci_cmd_wq#200 ->(wq_completion)nfc2_nci_cmd_wq#1008 ->(wq_completion)nfc3_nci_cmd_wq#597 ->(wq_completion)nfc2_nci_cmd_wq#1009 ->(wq_completion)nfc3_nci_cmd_wq#598 ->(wq_completion)nfc2_nci_cmd_wq#1010 ->(wq_completion)nfc3_nci_cmd_wq#599 ->(wq_completion)nfc4_nci_cmd_wq#201 ->(wq_completion)nfc5_nci_cmd_wq#47 ->(wq_completion)nfc2_nci_cmd_wq#1011 ->(wq_completion)nfc2_nci_cmd_wq#1012 ->(wq_completion)nfc3_nci_cmd_wq#600 ->(wq_completion)nfc2_nci_cmd_wq#1013 ->(wq_completion)nfc2_nci_cmd_wq#1014 ->(wq_completion)nfc3_nci_cmd_wq#601 ->(wq_completion)nfc2_nci_cmd_wq#1015 ->(wq_completion)nfc2_nci_cmd_wq#1016 ->(wq_completion)nfc2_nci_cmd_wq#1017 ->(wq_completion)nfc2_nci_cmd_wq#1018 ->(wq_completion)nfc3_nci_cmd_wq#602 ->(wq_completion)nfc2_nci_cmd_wq#1019 ->(wq_completion)nfc3_nci_cmd_wq#603 ->(wq_completion)nfc2_nci_cmd_wq#1020 ->(wq_completion)nfc3_nci_cmd_wq#604 ->(wq_completion)nfc4_nci_cmd_wq#202 ->(wq_completion)nfc2_nci_cmd_wq#1021 ->(wq_completion)nfc3_nci_cmd_wq#605 ->(wq_completion)nfc2_nci_cmd_wq#1022 ->(wq_completion)nfc3_nci_cmd_wq#606 ->(wq_completion)nfc4_nci_cmd_wq#203 ->(wq_completion)nfc3_nci_cmd_wq#607 ->(wq_completion)nfc4_nci_cmd_wq#204 ->(wq_completion)nfc3_nci_cmd_wq#608 ->(wq_completion)nfc2_nci_cmd_wq#1023 ->(wq_completion)nfc2_nci_cmd_wq#1024 ->(wq_completion)nfc2_nci_cmd_wq#1025 ->(wq_completion)nfc3_nci_cmd_wq#609 ->(wq_completion)nfc2_nci_cmd_wq#1026 ->(wq_completion)nfc3_nci_cmd_wq#610 ->(wq_completion)nfc2_nci_cmd_wq#1027 ->(wq_completion)nfc4_nci_cmd_wq#205 ->(wq_completion)nfc2_nci_cmd_wq#1028 ->(wq_completion)nfc2_nci_cmd_wq#1029 ->(wq_completion)nfc3_nci_cmd_wq#611 ->(wq_completion)nfc2_nci_cmd_wq#1030 ->(wq_completion)nfc3_nci_cmd_wq#612 ->(wq_completion)nfc4_nci_cmd_wq#206 ->(wq_completion)nfc5_nci_cmd_wq#48 ->(wq_completion)nfc2_nci_cmd_wq#1031 ->(wq_completion)nfc2_nci_cmd_wq#1032 ->(wq_completion)nfc3_nci_cmd_wq#613 ->(wq_completion)nfc5_nci_cmd_wq#49 ->(wq_completion)nfc4_nci_cmd_wq#207 ->(wq_completion)nfc2_nci_cmd_wq#1033 ->(wq_completion)nfc3_nci_cmd_wq#614 ->(wq_completion)nfc4_nci_cmd_wq#208 ->(wq_completion)nfc2_nci_cmd_wq#1034 ->(wq_completion)nfc3_nci_cmd_wq#615 ->(wq_completion)nfc4_nci_cmd_wq#209 ->(wq_completion)nfc5_nci_cmd_wq#50 
->(wq_completion)nfc2_nci_cmd_wq#1035 ->(wq_completion)nfc3_nci_cmd_wq#616 ->(wq_completion)nfc4_nci_cmd_wq#210 ->(wq_completion)nfc2_nci_cmd_wq#1036 ->(wq_completion)nfc3_nci_cmd_wq#617 ->(wq_completion)nfc7_nci_cmd_wq#11 ->(wq_completion)nfc4_nci_cmd_wq#211 ->(wq_completion)nfc6_nci_cmd_wq#12 ->(wq_completion)nfc5_nci_cmd_wq#51 ->(wq_completion)nfc2_nci_cmd_wq#1037 ->(wq_completion)nfc2_nci_cmd_wq#1038 ->(wq_completion)nfc3_nci_cmd_wq#618 ->(wq_completion)nfc4_nci_cmd_wq#212 ->(wq_completion)nfc2_nci_cmd_wq#1039 ->(wq_completion)nfc2_nci_cmd_wq#1040 ->(wq_completion)nfc2_nci_cmd_wq#1041 ->(wq_completion)nfc3_nci_cmd_wq#619 ->(wq_completion)nfc4_nci_cmd_wq#213 ->(wq_completion)nfc3_nci_cmd_wq#620 ->(wq_completion)nfc2_nci_cmd_wq#1042 ->(wq_completion)nfc5_nci_cmd_wq#52 ->(wq_completion)nfc2_nci_cmd_wq#1043 ->(wq_completion)nfc3_nci_cmd_wq#621 ->(wq_completion)nfc2_nci_cmd_wq#1044 ->(wq_completion)nfc3_nci_cmd_wq#622 ->(wq_completion)nfc4_nci_cmd_wq#214 ->(wq_completion)nfc2_nci_cmd_wq#1045 ->(wq_completion)nfc3_nci_cmd_wq#623 ->(wq_completion)nfc2_nci_cmd_wq#1046 ->(wq_completion)nfc6_nci_cmd_wq#13 ->(wq_completion)nfc6_nci_cmd_wq#14 ->(wq_completion)nfc11_nci_cmd_wq#8 ->(wq_completion)nfc11_nci_cmd_wq#9 ->(wq_completion)nfc12_nci_cmd_wq#9 ->(wq_completion)nfc14_nci_cmd_wq#8 ->(wq_completion)nfc15_nci_cmd_wq#7 ->(wq_completion)nfc16_nci_cmd_wq#8 ->(wq_completion)nfc18_nci_cmd_wq#8 ->(wq_completion)nfc20_nci_cmd_wq#5 ->(wq_completion)nfc18_nci_cmd_wq#9 ->(wq_completion)nfc26_nci_cmd_wq#7 ->(wq_completion)nfc29_nci_cmd_wq#5 ->(wq_completion)nfc4_nci_cmd_wq#215 ->(wq_completion)nfc2_nci_cmd_wq#1047 ->(wq_completion)nfc3_nci_cmd_wq#624 ->(wq_completion)nfc5_nci_cmd_wq#53 ->(wq_completion)nfc7_nci_cmd_wq#12 ->(wq_completion)nfc8_nci_cmd_wq#10 ->(wq_completion)nfc9_nci_cmd_wq#9 ->(wq_completion)nfc10_nci_cmd_wq#8 ->(wq_completion)nfc6_nci_cmd_wq#15 ->(wq_completion)nfc13_nci_cmd_wq#6 ->(wq_completion)nfc11_nci_cmd_wq#10 ->(wq_completion)nfc12_nci_cmd_wq#10 ->(wq_completion)nfc17_nci_cmd_wq#10 ->(wq_completion)nfc14_nci_cmd_wq#9 ->(wq_completion)nfc15_nci_cmd_wq#8 ->(wq_completion)nfc16_nci_cmd_wq#9 ->(wq_completion)nfc19_nci_cmd_wq#6 ->(wq_completion)nfc21_nci_cmd_wq#9 ->(wq_completion)nfc22_nci_cmd_wq#7 ->(wq_completion)nfc20_nci_cmd_wq#6 ->(wq_completion)nfc18_nci_cmd_wq#10 ->(wq_completion)nfc23_nci_cmd_wq#7 ->(wq_completion)nfc24_nci_cmd_wq#6 ->(wq_completion)nfc25_nci_cmd_wq#8 ->(wq_completion)nfc27_nci_cmd_wq#6 ->(wq_completion)nfc28_nci_cmd_wq#7 ->(wq_completion)nfc35_nci_cmd_wq#3 ->(wq_completion)nfc34_nci_cmd_wq#8 ->(wq_completion)nfc33_nci_cmd_wq#6 ->(wq_completion)nfc32_nci_cmd_wq#5 ->(wq_completion)nfc31_nci_cmd_wq#7 ->(wq_completion)nfc30_nci_cmd_wq#6 ->(wq_completion)nfc29_nci_cmd_wq#6 ->(wq_completion)nfc26_nci_cmd_wq#8 ->(wq_completion)nfc2_nci_cmd_wq#1048 ->(wq_completion)nfc3_nci_cmd_wq#625 ->(wq_completion)nfc2_nci_cmd_wq#1049 ->(wq_completion)nfc3_nci_cmd_wq#626 ->(wq_completion)nfc2_nci_cmd_wq#1050 ->(wq_completion)nfc3_nci_cmd_wq#627 ->(wq_completion)nfc2_nci_cmd_wq#1051 ->(wq_completion)nfc4_nci_cmd_wq#216 ->(wq_completion)nfc2_nci_cmd_wq#1052 ->(wq_completion)nfc2_nci_cmd_wq#1053 ->(wq_completion)nfc3_nci_cmd_wq#628 ->(wq_completion)nfc2_nci_cmd_wq#1054 ->(wq_completion)nfc3_nci_cmd_wq#629 ->(wq_completion)nfc2_nci_cmd_wq#1055 ->(wq_completion)nfc3_nci_cmd_wq#630 ->(wq_completion)nfc2_nci_cmd_wq#1056 ->(wq_completion)nfc3_nci_cmd_wq#631 ->(wq_completion)nfc2_nci_cmd_wq#1057 ->(wq_completion)nfc3_nci_cmd_wq#632 ->(wq_completion)nfc2_nci_cmd_wq#1058 
->(wq_completion)nfc2_nci_cmd_wq#1059 ->(wq_completion)nfc3_nci_cmd_wq#633 ->(wq_completion)nfc2_nci_cmd_wq#1060 ->(wq_completion)nfc4_nci_cmd_wq#217 ->(wq_completion)nfc2_nci_cmd_wq#1061 ->(wq_completion)nfc3_nci_cmd_wq#634 ->(wq_completion)nfc4_nci_cmd_wq#218 ->(wq_completion)nfc2_nci_cmd_wq#1062 ->(wq_completion)nfc5_nci_cmd_wq#54 ->(wq_completion)nfc6_nci_cmd_wq#16 ->(wq_completion)nfc5_nci_cmd_wq#55 ->(wq_completion)nfc9_nci_cmd_wq#10 ->(wq_completion)nfc11_nci_cmd_wq#11 ->(wq_completion)nfc9_nci_cmd_wq#11 ->(wq_completion)nfc20_nci_cmd_wq#7 ->(wq_completion)nfc13_nci_cmd_wq#7 ->(wq_completion)nfc15_nci_cmd_wq#9 ->(wq_completion)nfc21_nci_cmd_wq#10 ->(wq_completion)nfc18_nci_cmd_wq#11 ->(wq_completion)nfc20_nci_cmd_wq#10 ->(wq_completion)nfc19_nci_cmd_wq#7 ->(wq_completion)nfc17_nci_cmd_wq#11 ->(wq_completion)nfc24_nci_cmd_wq#7 ->(wq_completion)nfc25_nci_cmd_wq#9 ->(wq_completion)nfc28_nci_cmd_wq#8 ->(wq_completion)nfc31_nci_cmd_wq#8 ->(wq_completion)nfc27_nci_cmd_wq#7 ->(wq_completion)nfc30_nci_cmd_wq#7 ->(wq_completion)nfc28_nci_cmd_wq#9 ->(wq_completion)nfc25_nci_cmd_wq#10 ->(wq_completion)nfc30_nci_cmd_wq#8 ->(wq_completion)nfc25_nci_cmd_wq#11 ->(wq_completion)nfc27_nci_cmd_wq#8 ->(wq_completion)nfc32_nci_cmd_wq#6 ->(wq_completion)nfc29_nci_cmd_wq#7 ->(wq_completion)nfc24_nci_cmd_wq#8 ->(wq_completion)nfc26_nci_cmd_wq#9 ->(wq_completion)nfc23_nci_cmd_wq#8 ->(wq_completion)nfc20_nci_cmd_wq#11 ->(wq_completion)nfc17_nci_cmd_wq#12 ->(wq_completion)nfc19_nci_cmd_wq#8 ->(wq_completion)nfc18_nci_cmd_wq#12 ->(wq_completion)nfc21_nci_cmd_wq#11 ->(wq_completion)nfc15_nci_cmd_wq#10 ->(wq_completion)nfc13_nci_cmd_wq#8 ->(wq_completion)nfc22_nci_cmd_wq#8 ->(wq_completion)nfc16_nci_cmd_wq#10 ->(wq_completion)nfc14_nci_cmd_wq#10 ->(wq_completion)nfc12_nci_cmd_wq#11 ->(wq_completion)nfc9_nci_cmd_wq#12 ->(wq_completion)nfc11_nci_cmd_wq#12 ->(wq_completion)nfc6_nci_cmd_wq#17 ->(wq_completion)nfc5_nci_cmd_wq#56 ->(wq_completion)nfc10_nci_cmd_wq#9 ->(wq_completion)nfc8_nci_cmd_wq#11 ->(wq_completion)nfc7_nci_cmd_wq#13 ->(wq_completion)nfc2_nci_cmd_wq#1063 ->(wq_completion)nfc4_nci_cmd_wq#219 ->(wq_completion)nfc3_nci_cmd_wq#635 ->(wq_completion)nfc3_nci_cmd_wq#636 ->(wq_completion)nfc2_nci_cmd_wq#1064 ->(wq_completion)nfc4_nci_cmd_wq#220 ->(wq_completion)nfc6_nci_cmd_wq#18 ->(wq_completion)nfc4_nci_cmd_wq#221 ->(wq_completion)nfc4_nci_cmd_wq#222 ->(wq_completion)nfc9_nci_cmd_wq#13 ->(wq_completion)nfc8_nci_cmd_wq#12 ->(wq_completion)nfc2_nci_cmd_wq#1065 ->(wq_completion)nfc6_nci_cmd_wq#21 ->(wq_completion)nfc5_nci_cmd_wq#57 ->(wq_completion)nfc7_nci_cmd_wq#14 ->(wq_completion)nfc4_nci_cmd_wq#224 ->(wq_completion)nfc3_nci_cmd_wq#637 ->(wq_completion)nfc2_nci_cmd_wq#1066 ->(wq_completion)nfc3_nci_cmd_wq#638 ->(wq_completion)nfc2_nci_cmd_wq#1067 ->(wq_completion)nfc2_nci_cmd_wq#1068 ->(wq_completion)nfc3_nci_cmd_wq#639 ->(wq_completion)nfc2_nci_cmd_wq#1069 ->(wq_completion)nfc2_nci_cmd_wq#1070 ->(wq_completion)nfc3_nci_cmd_wq#640 ->(wq_completion)nfc3_nci_cmd_wq#641 ->(wq_completion)nfc4_nci_cmd_wq#225 ->(wq_completion)nfc5_nci_cmd_wq#58 ->(wq_completion)nfc2_nci_cmd_wq#1071 ->(wq_completion)nfc2_nci_cmd_wq#1072 ->(wq_completion)nfc3_nci_cmd_wq#642 ->(wq_completion)nfc4_nci_cmd_wq#226 ->(wq_completion)nfc2_nci_cmd_wq#1073 ->(wq_completion)nfc3_nci_cmd_wq#643 ->(wq_completion)nfc2_nci_cmd_wq#1074 ->(wq_completion)nfc2_nci_cmd_wq#1075 ->(wq_completion)nfc3_nci_cmd_wq#644 ->(wq_completion)nfc4_nci_cmd_wq#227 ->(wq_completion)nfc2_nci_cmd_wq#1076 ->(wq_completion)nfc3_nci_cmd_wq#645 
->(wq_completion)nfc2_nci_cmd_wq#1077 ->(wq_completion)nfc2_nci_cmd_wq#1078 ->(wq_completion)nfc2_nci_cmd_wq#1079 ->(wq_completion)nfc3_nci_cmd_wq#646 ->(wq_completion)nfc4_nci_cmd_wq#228 ->(wq_completion)nfc2_nci_cmd_wq#1080 ->(wq_completion)nfc3_nci_cmd_wq#647 ->(wq_completion)nfc2_nci_cmd_wq#1081 ->(wq_completion)nfc3_nci_cmd_wq#648 ->(wq_completion)nfc2_nci_cmd_wq#1082 ->(wq_completion)nfc3_nci_cmd_wq#649 ->(wq_completion)nfc2_nci_cmd_wq#1083 ->(wq_completion)nfc2_nci_cmd_wq#1084 ->(wq_completion)nfc3_nci_cmd_wq#650 ->(wq_completion)nfc4_nci_cmd_wq#229 ->(wq_completion)nfc2_nci_cmd_wq#1085 ->(wq_completion)nfc2_nci_cmd_wq#1086 ->(wq_completion)nfc2_nci_cmd_wq#1088 ->(wq_completion)nfc3_nci_cmd_wq#651 ->(wq_completion)nfc4_nci_cmd_wq#230 ->(wq_completion)nfc2_nci_cmd_wq#1089 ->(wq_completion)nfc3_nci_cmd_wq#652 ->(wq_completion)nfc2_nci_cmd_wq#1090 ->(wq_completion)nfc3_nci_cmd_wq#653 ->(wq_completion)nfc2_nci_cmd_wq#1091 ->(wq_completion)nfc10_nci_cmd_wq#10 ->(wq_completion)nfc15_nci_cmd_wq#11 ->(wq_completion)nfc26_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#1092 ->(wq_completion)nfc3_nci_cmd_wq#654 ->(wq_completion)nfc4_nci_cmd_wq#231 ->(wq_completion)nfc5_nci_cmd_wq#59 ->(wq_completion)nfc6_nci_cmd_wq#22 ->(wq_completion)nfc7_nci_cmd_wq#15 ->(wq_completion)nfc2_nci_cmd_wq#1093 ->(wq_completion)nfc8_nci_cmd_wq#13 ->(wq_completion)nfc9_nci_cmd_wq#14 ->(wq_completion)nfc11_nci_cmd_wq#13 ->(wq_completion)nfc12_nci_cmd_wq#12 ->(wq_completion)nfc10_nci_cmd_wq#11 ->(wq_completion)nfc13_nci_cmd_wq#9 ->(wq_completion)nfc14_nci_cmd_wq#11 ->(wq_completion)nfc16_nci_cmd_wq#11 ->(wq_completion)nfc15_nci_cmd_wq#12 ->(wq_completion)nfc17_nci_cmd_wq#13 ->(wq_completion)nfc2_nci_cmd_wq#1094 ->(wq_completion)nfc18_nci_cmd_wq#13 ->(wq_completion)nfc19_nci_cmd_wq#9 ->(wq_completion)nfc20_nci_cmd_wq#12 ->(wq_completion)nfc21_nci_cmd_wq#12 ->(wq_completion)nfc22_nci_cmd_wq#9 ->(wq_completion)nfc23_nci_cmd_wq#9 ->(wq_completion)nfc24_nci_cmd_wq#9 ->(wq_completion)nfc25_nci_cmd_wq#12 ->(wq_completion)nfc27_nci_cmd_wq#9 ->(wq_completion)nfc26_nci_cmd_wq#11 ->(wq_completion)nfc2_nci_cmd_wq#1095 ->(wq_completion)nfc34_nci_cmd_wq#9 ->(wq_completion)nfc33_nci_cmd_wq#7 ->(wq_completion)nfc32_nci_cmd_wq#7 ->(wq_completion)nfc31_nci_cmd_wq#9 ->(wq_completion)nfc30_nci_cmd_wq#9 ->(wq_completion)nfc29_nci_cmd_wq#8 ->(wq_completion)nfc28_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#1096 ->(wq_completion)nfc2_nci_cmd_wq#1097 ->(wq_completion)nfc3_nci_cmd_wq#655 ->(wq_completion)nfc2_nci_cmd_wq#1098 ->(wq_completion)nfc3_nci_cmd_wq#656 ->(wq_completion)nfc2_nci_cmd_wq#1099 ->(wq_completion)nfc3_nci_cmd_wq#657 ->(wq_completion)nfc2_nci_cmd_wq#1100 ->(wq_completion)nfc2_nci_cmd_wq#1101 ->(wq_completion)nfc2_nci_cmd_wq#1102 ->(wq_completion)nfc3_nci_cmd_wq#658 ->(wq_completion)nfc2_nci_cmd_wq#1103 ->(wq_completion)nfc3_nci_cmd_wq#659 ->(wq_completion)nfc2_nci_cmd_wq#1104 ->(wq_completion)nfc3_nci_cmd_wq#660 ->(wq_completion)nfc2_nci_cmd_wq#1105 ->(wq_completion)nfc2_nci_cmd_wq#1106 ->(wq_completion)nfc3_nci_cmd_wq#661 ->(wq_completion)nfc2_nci_cmd_wq#1107 ->(wq_completion)nfc3_nci_cmd_wq#662 ->(wq_completion)nfc2_nci_cmd_wq#1108 ->(wq_completion)nfc2_nci_cmd_wq#1109 ->(wq_completion)nfc3_nci_cmd_wq#663 ->(wq_completion)nfc2_nci_cmd_wq#1110 ->(wq_completion)nfc2_nci_cmd_wq#1111 ->(wq_completion)nfc3_nci_cmd_wq#664 ->(wq_completion)nfc2_nci_cmd_wq#1112 ->(wq_completion)nfc6_nci_cmd_wq#23 ->(wq_completion)nfc7_nci_cmd_wq#16 ->(wq_completion)nfc13_nci_cmd_wq#10 ->(wq_completion)nfc19_nci_cmd_wq#10 
->(wq_completion)nfc28_nci_cmd_wq#11 ->(wq_completion)nfc30_nci_cmd_wq#10 ->(wq_completion)nfc36_nci_cmd_wq#3 ->(wq_completion)nfc42_nci_cmd_wq ->(wq_completion)nfc2_nci_cmd_wq#1113 ->(wq_completion)nfc2_nci_cmd_wq#1114 ->(wq_completion)nfc42_nci_cmd_wq#2 ->(wq_completion)nfc43_nci_cmd_wq ->(wq_completion)nfc41_nci_cmd_wq#3 ->(wq_completion)nfc40_nci_cmd_wq#3 ->(wq_completion)nfc39_nci_cmd_wq#3 ->(wq_completion)nfc38_nci_cmd_wq#3 ->(wq_completion)nfc36_nci_cmd_wq#4 ->(wq_completion)nfc37_nci_cmd_wq#3 ->(wq_completion)nfc35_nci_cmd_wq#4 ->(wq_completion)nfc34_nci_cmd_wq#10 ->(wq_completion)nfc33_nci_cmd_wq#8 ->(wq_completion)nfc30_nci_cmd_wq#11 ->(wq_completion)nfc32_nci_cmd_wq#8 ->(wq_completion)nfc31_nci_cmd_wq#10 ->(wq_completion)nfc28_nci_cmd_wq#12 ->(wq_completion)nfc29_nci_cmd_wq#9 ->(wq_completion)nfc27_nci_cmd_wq#10 ->(wq_completion)nfc26_nci_cmd_wq#12 ->(wq_completion)nfc25_nci_cmd_wq#13 ->(wq_completion)nfc24_nci_cmd_wq#10 ->(wq_completion)nfc23_nci_cmd_wq#10 ->(wq_completion)nfc22_nci_cmd_wq#10 ->(wq_completion)nfc19_nci_cmd_wq#11 ->(wq_completion)nfc21_nci_cmd_wq#13 ->(wq_completion)nfc20_nci_cmd_wq#13 ->(wq_completion)nfc18_nci_cmd_wq#14 ->(wq_completion)nfc17_nci_cmd_wq#14 ->(wq_completion)nfc16_nci_cmd_wq#12 ->(wq_completion)nfc15_nci_cmd_wq#13 ->(wq_completion)nfc13_nci_cmd_wq#11 ->(wq_completion)nfc14_nci_cmd_wq#12 ->(wq_completion)nfc12_nci_cmd_wq#13 ->(wq_completion)nfc11_nci_cmd_wq#14 ->(wq_completion)nfc10_nci_cmd_wq#12 ->(wq_completion)nfc9_nci_cmd_wq#15 ->(wq_completion)nfc7_nci_cmd_wq#17 ->(wq_completion)nfc6_nci_cmd_wq#24 ->(wq_completion)nfc8_nci_cmd_wq#14 ->(wq_completion)nfc5_nci_cmd_wq#60 ->(wq_completion)nfc4_nci_cmd_wq#232 ->(wq_completion)nfc3_nci_cmd_wq#665 ->(wq_completion)nfc2_nci_cmd_wq#1115 ->(wq_completion)nfc2_nci_cmd_wq#1116 ->(wq_completion)nfc2_nci_cmd_wq#1117 ->(wq_completion)nfc2_nci_cmd_wq#1118 ->(wq_completion)nfc2_nci_cmd_wq#1119 ->(wq_completion)nfc2_nci_cmd_wq#1120 ->(wq_completion)nfc2_nci_cmd_wq#1121 ->(wq_completion)nfc3_nci_cmd_wq#666 ->(wq_completion)nfc2_nci_cmd_wq#1122 ->(wq_completion)nfc2_nci_cmd_wq#1123 ->(wq_completion)nfc2_nci_cmd_wq#1124 ->(wq_completion)nfc2_nci_cmd_wq#1125 ->(wq_completion)nfc4_nci_cmd_wq#233 ->(wq_completion)nfc3_nci_cmd_wq#668 ->(wq_completion)nfc2_nci_cmd_wq#1126 ->(wq_completion)nfc3_nci_cmd_wq#669 ->(wq_completion)nfc6_nci_cmd_wq#25 ->(wq_completion)nfc5_nci_cmd_wq#61 ->(wq_completion)nfc16_nci_cmd_wq#13 ->(wq_completion)nfc27_nci_cmd_wq#11 ->(wq_completion)nfc40_nci_cmd_wq#4 ->(wq_completion)nfc2_nci_cmd_wq#1127 ->(wq_completion)nfc3_nci_cmd_wq#670 ->(wq_completion)nfc4_nci_cmd_wq#234 ->(wq_completion)nfc7_nci_cmd_wq#18 ->(wq_completion)nfc8_nci_cmd_wq#15 ->(wq_completion)nfc9_nci_cmd_wq#16 ->(wq_completion)nfc5_nci_cmd_wq#62 ->(wq_completion)nfc6_nci_cmd_wq#26 ->(wq_completion)nfc10_nci_cmd_wq#13 ->(wq_completion)nfc11_nci_cmd_wq#15 ->(wq_completion)nfc12_nci_cmd_wq#14 ->(wq_completion)nfc43_nci_cmd_wq#2 ->(wq_completion)nfc13_nci_cmd_wq#12 ->(wq_completion)nfc17_nci_cmd_wq#15 ->(wq_completion)nfc16_nci_cmd_wq#14 ->(wq_completion)nfc19_nci_cmd_wq#12 ->(wq_completion)nfc10_nci_cmd_wq#14 ->(wq_completion)nfc6_nci_cmd_wq#27 ->(wq_completion)nfc20_nci_cmd_wq#14 ->(wq_completion)nfc8_nci_cmd_wq#16 ->(wq_completion)nfc23_nci_cmd_wq#11 ->(wq_completion)nfc7_nci_cmd_wq#19 ->(wq_completion)nfc24_nci_cmd_wq#11 ->(wq_completion)nfc3_nci_cmd_wq#671 ->(wq_completion)nfc40_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#13 ->(wq_completion)nfc39_nci_cmd_wq#4 ->(wq_completion)nfc29_nci_cmd_wq#10 
->(wq_completion)nfc3_nci_cmd_wq#672 ->(wq_completion)nfc38_nci_cmd_wq#4 ->(wq_completion)nfc31_nci_cmd_wq#11 ->(wq_completion)nfc33_nci_cmd_wq#9 ->(wq_completion)nfc35_nci_cmd_wq#5 ->(wq_completion)nfc36_nci_cmd_wq#5 ->(wq_completion)nfc34_nci_cmd_wq#11 ->(wq_completion)nfc37_nci_cmd_wq#4 ->(wq_completion)nfc17_nci_cmd_wq#16 ->(wq_completion)nfc32_nci_cmd_wq#9 ->(wq_completion)nfc27_nci_cmd_wq#12 ->(wq_completion)nfc41_nci_cmd_wq#4 ->(wq_completion)nfc30_nci_cmd_wq#12 ->(wq_completion)nfc2_nci_cmd_wq#1128 ->(wq_completion)nfc26_nci_cmd_wq#13 ->(wq_completion)nfc25_nci_cmd_wq#14 ->(wq_completion)nfc22_nci_cmd_wq#11 ->(wq_completion)nfc4_nci_cmd_wq#235 ->(wq_completion)nfc21_nci_cmd_wq#14 ->(wq_completion)nfc42_nci_cmd_wq#3 ->(wq_completion)nfc18_nci_cmd_wq#15 ->(wq_completion)nfc15_nci_cmd_wq#14 ->(wq_completion)nfc9_nci_cmd_wq#17 ->(wq_completion)nfc14_nci_cmd_wq#13 ->(wq_completion)nfc5_nci_cmd_wq#63 ->(wq_completion)nfc11_nci_cmd_wq#16 ->(wq_completion)nfc12_nci_cmd_wq#15 ->(wq_completion)nfc13_nci_cmd_wq#14 ->(wq_completion)nfc10_nci_cmd_wq#15 ->(wq_completion)nfc6_nci_cmd_wq#28 ->(wq_completion)nfc7_nci_cmd_wq#20 ->(wq_completion)nfc3_nci_cmd_wq#673 ->(wq_completion)nfc8_nci_cmd_wq#17 ->(wq_completion)nfc16_nci_cmd_wq#15 ->(wq_completion)nfc19_nci_cmd_wq#13 ->(wq_completion)nfc17_nci_cmd_wq#17 ->(wq_completion)nfc18_nci_cmd_wq#16 ->(wq_completion)nfc14_nci_cmd_wq#14 ->(wq_completion)nfc21_nci_cmd_wq#15 ->(wq_completion)nfc17_nci_cmd_wq#18 ->(wq_completion)nfc16_nci_cmd_wq#16 ->(wq_completion)nfc8_nci_cmd_wq#18 ->(wq_completion)nfc7_nci_cmd_wq#21 ->(wq_completion)nfc3_nci_cmd_wq#674 ->(wq_completion)nfc6_nci_cmd_wq#29 ->(wq_completion)nfc10_nci_cmd_wq#16 ->(wq_completion)nfc13_nci_cmd_wq#15 ->(wq_completion)nfc12_nci_cmd_wq#16 ->(wq_completion)nfc11_nci_cmd_wq#17 ->(wq_completion)nfc5_nci_cmd_wq#64 ->(wq_completion)nfc9_nci_cmd_wq#18 ->(wq_completion)nfc15_nci_cmd_wq#15 ->(wq_completion)nfc20_nci_cmd_wq#15 ->(wq_completion)nfc4_nci_cmd_wq#236 ->(wq_completion)nfc2_nci_cmd_wq#1129 ->(wq_completion)nfc2_nci_cmd_wq#1130 ->(wq_completion)nfc3_nci_cmd_wq#675 ->(wq_completion)nfc2_nci_cmd_wq#1131 ->(wq_completion)nfc3_nci_cmd_wq#676 ->(wq_completion)nfc4_nci_cmd_wq#237 ->(wq_completion)nfc2_nci_cmd_wq#1132 ->(wq_completion)nfc3_nci_cmd_wq#677 ->(wq_completion)nfc2_nci_cmd_wq#1133 ->(wq_completion)nfc3_nci_cmd_wq#678 ->(wq_completion)nfc2_nci_cmd_wq#1134 ->(wq_completion)nfc3_nci_cmd_wq#679 ->(wq_completion)nfc4_nci_cmd_wq#238 ->(wq_completion)nfc2_nci_cmd_wq#1135 ->(wq_completion)nfc3_nci_cmd_wq#680 ->(wq_completion)nfc2_nci_cmd_wq#1136 ->(wq_completion)nfc2_nci_cmd_wq#1137 ->(wq_completion)nfc3_nci_cmd_wq#681 ->(wq_completion)nfc2_nci_cmd_wq#1138 ->(wq_completion)nfc2_nci_cmd_wq#1139 ->(wq_completion)nfc2_nci_cmd_wq#1140 ->(wq_completion)nfc3_nci_cmd_wq#682 ->(wq_completion)nfc2_nci_cmd_wq#1141 ->(wq_completion)nfc2_nci_cmd_wq#1142 ->(wq_completion)nfc3_nci_cmd_wq#683 ->(wq_completion)nfc2_nci_cmd_wq#1143 ->(wq_completion)nfc3_nci_cmd_wq#684 ->(wq_completion)nfc2_nci_cmd_wq#1144 ->(wq_completion)nfc2_nci_cmd_wq#1145 ->(wq_completion)nfc2_nci_cmd_wq#1146 ->(wq_completion)nfc9_nci_cmd_wq#19 ->(wq_completion)nfc11_nci_cmd_wq#18 ->(wq_completion)nfc18_nci_cmd_wq#17 ->(wq_completion)nfc21_nci_cmd_wq#16 ->(wq_completion)nfc26_nci_cmd_wq#14 ->(wq_completion)nfc30_nci_cmd_wq#13 ->(wq_completion)nfc31_nci_cmd_wq#12 ->(wq_completion)nfc34_nci_cmd_wq#12 ->(wq_completion)nfc33_nci_cmd_wq#10 ->(wq_completion)nfc35_nci_cmd_wq#6 ->(wq_completion)nfc31_nci_cmd_wq#13 ->(wq_completion)nfc32_nci_cmd_wq#10 
->(wq_completion)nfc30_nci_cmd_wq#14 ->(wq_completion)nfc29_nci_cmd_wq#11 ->(wq_completion)nfc28_nci_cmd_wq#14 FD: 35 BD: 6 +.+.: (work_completion)(&(&bat_priv->tt.work)->work) ->key#16 ->key#21 ->&bat_priv->tt.req_list_lock ->&bat_priv->tt.roam_list_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 2 ....: (&ndev->cmd_timer) FD: 1 BD: 2 ....: (&ndev->data_timer) FD: 31 BD: 1 ..-.: &(&bat_priv->tt.work)->timer FD: 1 BD: 59 +.-.: &r->producer_lock#3 FD: 26 BD: 5 +.+.: (work_completion)(&rfkill->uevent_work) ->&rq->__lock FD: 85 BD: 1 +.-.: (&ndev->rs_timer) ->&ndev->lock ->pool_lock#2 ->&dir->lock#2 ->&ul->lock#2 ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->&zone->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->init_task.mems_allowed_seq.seqcount FD: 1 BD: 4 ++++: &local->sockets.lock FD: 1 BD: 3 +.+.: &local->raw_sockets.lock FD: 1 BD: 1 ....: (&local->link_timer) FD: 26 BD: 1 +.+.: (work_completion)(&local->tx_work) ->&rq->__lock FD: 26 BD: 1 +.+.: (work_completion)(&local->rx_work) ->&rq->__lock FD: 26 BD: 1 +.+.: (work_completion)(&local->timeout_work) ->&rq->__lock FD: 1 BD: 1 ....: (&local->sdreq_timer) FD: 26 BD: 1 +.+.: (work_completion)(&local->sdreq_timeout_work) ->&rq->__lock FD: 1 BD: 7 +...: key#21 FD: 1 BD: 7 +...: &bat_priv->tt.req_list_lock FD: 1 BD: 7 +...: &bat_priv->tt.roam_list_lock FD: 27 BD: 13 ....: &sk->sk_lock.wq ->&p->pi_lock FD: 1 BD: 120 +.+.: smack_known_lock.wait_lock FD: 1 BD: 102 ....: key#22 FD: 93 BD: 1 +.+.: &type->s_umount_key#47 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rename_lock.seqcount ->&dentry->d_lock ->&dentry->d_lock/1 ->&sb->s_type->i_lock_key#32 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->binderfs_minors_mutex ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&rcu_state.gp_wq FD: 1 BD: 1 +.+.: &mq_lock FD: 114 BD: 2 +.+.: free_ipc_work ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->mount_lock ->&fsnotify_mark_srcu ->&dentry->d_lock ->&type->s_umount_key#48 ->sb_lock ->unnamed_dev_ida.xa_lock ->list_lrus_mutex ->&xa->xa_lock#5 ->pool_lock#2 ->mnt_id_ida.xa_lock ->&ids->rwsem ->(work_completion)(&ht->run_work) ->&ht->mutex ->percpu_counters_lock ->pcpu_lock ->sysctl_lock ->&sb->s_type->i_lock_key#23 ->rename_lock.seqcount ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->proc_inum_ida.xa_lock ->pool_lock ->&rnp->exp_lock ->&rnp->exp_wq[0] ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->stock_lock ->quarantine_lock ->&____s->seqcount ->&cfs_rq->removed.lock FD: 90 BD: 3 +.+.: &type->s_umount_key#48 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rename_lock.seqcount ->&dentry->d_lock ->&sb->s_type->i_lock_key#19 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock FD: 60 BD: 53 +.-.: (&peer->timer_retransmit_handshake) ->&peer->endpoint_lock ->&obj_hash[i].lock ->&list->lock#14 FD: 1 BD: 1 +.+.: (work_completion)(&data->suspend_work) FD: 1 BD: 1 +.+.: (work_completion)(&hdev->reenable_adv_work) FD: 26 BD: 12 +.+.: (work_completion)(&(&hdev->interleave_scan)->work) ->&rq->__lock FD: 1 BD: 23 +.+.: (work_completion)(&(&conn->id_addr_timer)->work) FD: 26 BD: 1 +.+.: (work_completion)(&(&ctx->fallback_work)->work) ->&rq->__lock FD: 56 BD: 1 +.+.: &udc->connect_lock ->&dum_hcd->dum->lock ->&rq->__lock ->hcd_root_hub_lock ->&queue->lock 
->udc_lock FD: 70 BD: 22 +.+.: (work_completion)(&(&conn->disc_work)->work) ->&hdev->unregister_lock FD: 1 BD: 22 +.+.: (work_completion)(&(&conn->auto_accept_work)->work) FD: 1 BD: 22 +.+.: (work_completion)(&(&conn->idle_work)->work) FD: 1 BD: 23 +.+.: hci_cb_list_lock.wait_lock FD: 45 BD: 55 +.-.: _xmit_NETROM ->(console_sem).lock ->console_owner_lock ->console_owner ->&obj_hash[i].lock ->pool_lock#2 ->&dir->lock ->stock_lock ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock FD: 27 BD: 52 +.+.: __ip_vs_mutex ->&ipvs->dest_trash_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 53 +...: &ipvs->dest_trash_lock FD: 26 BD: 52 +.+.: flowtable_lock ->&rq->__lock FD: 11 BD: 56 +...: &idev->mc_query_lock ->&obj_hash[i].lock FD: 26 BD: 56 +.+.: (work_completion)(&(&idev->mc_report_work)->work) ->&rq->__lock FD: 26 BD: 56 +.+.: &net->xdp.lock ->&rq->__lock FD: 1 BD: 56 +.+.: mirred_list_lock FD: 1 BD: 56 +...: &idev->mc_report_lock FD: 27 BD: 56 +.+.: &pnn->pndevs.lock ->&rq->__lock FD: 26 BD: 56 +.+.: &pnn->routes.lock ->&rq->__lock ->&cfs_rq->removed.lock FD: 873 BD: 1 +.+.: (wq_completion)netns ->net_cleanup_work FD: 872 BD: 2 +.+.: net_cleanup_work ->pernet_ops_rwsem ->rcu_state.barrier_mutex ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&dir->lock ->rcu_state.barrier_mutex.wait_lock ->&p->pi_lock ->stock_lock ->quarantine_lock ->&rq->__lock ->&base->lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->ucounts_lock FD: 1 BD: 5 +...: &net->nsid_lock FD: 1 BD: 5 +...: &tn->node_list_lock FD: 1 BD: 52 +...: &net->xfrm.xfrm_state_lock FD: 1 BD: 54 +...: &net->xfrm.xfrm_policy_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#730 ->&rq->__lock FD: 31 BD: 1 ..-.: &(&bat_priv->dat.work)->timer FD: 31 BD: 1 ..-.: &(&bat_priv->bla.work)->timer FD: 30 BD: 6 +.+.: (work_completion)(&(&bat_priv->dat.work)->work) ->&hash->list_locks[i] ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->pool_lock#2 ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 7 +...: &hash->list_locks[i] FD: 33 BD: 6 +.+.: (work_completion)(&(&bat_priv->bla.work)->work) ->key#20 ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->crngs.lock ->&cfs_rq->removed.lock FD: 222 BD: 1 +.+.: (wq_completion)vsock-loopback ->(work_completion)(&vsock->pkt_work) FD: 4 BD: 54 +...: _xmit_NONE#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: (work_completion)(&data->fib_flush_work) FD: 26 BD: 1 +.+.: ®ion->snapshot_lock ->&rq->__lock FD: 1 BD: 5 +.+.: netns_bpf_mutex FD: 1 BD: 5 ....: (&net->fs_probe_timer) FD: 26 BD: 7 ++++: &net->cells_lock ->&rq->__lock FD: 1 BD: 5 ....: (&net->cells_timer) FD: 32 BD: 1 +.+.: (wq_completion)afs ->(work_completion)(&net->cells_manager) ->(work_completion)(&net->fs_manager) FD: 29 BD: 2 +.+.: (work_completion)(&net->cells_manager) ->&net->cells_lock ->bit_wait_table + i ->&rq->__lock FD: 1 BD: 5 ....: (&net->fs_timer) FD: 29 BD: 2 +.+.: (work_completion)(&net->fs_manager) ->&(&net->fs_lock)->lock ->bit_wait_table + i ->&rq->__lock FD: 1 BD: 3 +.+.: &(&net->fs_lock)->lock FD: 1 BD: 6 +.+.: &rx->incoming_lock FD: 1 BD: 6 +.+.: &call->notify_lock FD: 1 BD: 6 ....: (rxrpc_call_limiter).lock FD: 1 BD: 6 +.+.: &rx->recvmsg_lock FD: 1 BD: 6 ....: (&call->timer) FD: 1 BD: 6 ....: &list->lock#17 FD: 1 BD: 5 +.+.: (wq_completion)kafsd FD: 1 BD: 5 +...: k-clock-AF_RXRPC FD: 1 BD: 5 ..-.: rlock-AF_RXRPC FD: 1 BD: 1 ....: (&local->client_conn_reap_timer) FD: 1 BD: 1 ....: &list->lock#18 FD: 1 BD: 8 +.+.: 
(work_completion)(&data->gc_work) FD: 1 BD: 5 +.+.: (work_completion)(&ovs_net->dp_notify_work) FD: 1 BD: 5 +...: &srv->idr_lock FD: 1 BD: 5 ....: (&rxnet->service_conn_reap_timer) FD: 1 BD: 55 +.+.: rcu_state.barrier_mutex.wait_lock FD: 1 BD: 7 +...: &nt->cluster_scope_lock FD: 1 BD: 128 +.+.: rcu_state.exp_mutex.wait_lock FD: 1 BD: 5 +.+.: (work_completion)(&tn->work) FD: 1 BD: 5 +.+.: (work_completion)(&(&c->work)->work) FD: 1 BD: 54 +...: _xmit_SIT#2 FD: 1 BD: 5 +.+.: (wq_completion)krdsd FD: 1 BD: 5 +.+.: (work_completion)(&rtn->rds_tcp_accept_w) FD: 1 BD: 56 +...: &icsk->icsk_accept_queue.rskq_lock#2 FD: 1 BD: 5 ....: rds_tcp_conn_lock FD: 1 BD: 5 ....: loop_conns_lock FD: 1 BD: 5 +.+.: (wq_completion)l2tp FD: 2 BD: 6 +.+.: (work_completion)(&rxnet->service_conn_reaper) ->&rxnet->conn_lock FD: 1 BD: 54 +...: _xmit_TUNNEL#2 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#441 ->&rq->__lock FD: 49 BD: 2 +.+.: (work_completion)(&(&devlink->rwork)->work) ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->pool_lock#2 ->&cfs_rq->removed.lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->quarantine_lock ->pool_lock ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->rcu_node_0 FD: 1 BD: 9 +.+.: &fn->fou_lock FD: 1 BD: 5 +.+.: ipvs->sync_mutex FD: 1 BD: 56 +.+.: (work_completion)(&sta->drv_deliver_wk) FD: 1 BD: 56 ....: (&ifibss->timer) FD: 1 BD: 52 +.+.: (work_completion)(&wdev->disconnect_wk) FD: 1 BD: 52 +.+.: (work_completion)(&wdev->pmsr_free_wk) FD: 712 BD: 6 +.+.: (work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 56 ....: (&local->dynamic_ps_timer) FD: 1 BD: 56 ....: (&dwork->timer)#3 FD: 1 BD: 52 +.+.: (work_completion)(&(&priv->scan_result)->work) FD: 1 BD: 52 ....: &rdev->dev_wait FD: 1 BD: 56 ....: (&dwork->timer)#4 FD: 1 BD: 5 +.+.: (work_completion)(&local->restart_work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->conn_work) FD: 1 BD: 5 +.+.: (work_completion)(&(&rdev->background_cac_done_wk)->work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->destroy_work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->propagate_radar_detect_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->propagate_cac_done_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->mgmt_registrations_update_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->background_cac_abort_wk) FD: 1 BD: 5 ....: (&local->sta_cleanup) FD: 3 BD: 5 +.+.: rdma_nets_rwsem ->rdma_nets.xa_lock FD: 1 BD: 5 +...: k-clock-AF_NETLINK FD: 1 BD: 52 +.+.: (work_completion)(&(&priv->connect)->work) FD: 1 BD: 52 +...: &bat_priv->forw_bcast_list_lock FD: 1 BD: 75 ....: net_rwsem.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#3 FD: 44 BD: 53 +.+.: (work_completion)(&br->mcast_gc_work) ->&br->multicast_lock ->(&p->rexmit_timer) ->&obj_hash[i].lock ->&base->lock ->(&p->timer) ->pool_lock#2 ->krc.lock ->(&mp->timer) ->&rq->__lock FD: 1 BD: 54 ....: (&p->rexmit_timer) FD: 40 BD: 54 +.-.: (&p->timer) ->&br->multicast_lock FD: 40 BD: 54 +.-.: (&mp->timer) ->&br->multicast_lock FD: 1 BD: 52 ....: (&pmctx->ip6_mc_router_timer) FD: 1 BD: 52 ....: (&pmctx->ip4_mc_router_timer) FD: 1 BD: 54 +...: &vlan_netdev_xmit_lock_key FD: 1 BD: 54 +...: &batadv_netdev_xmit_lock_key FD: 1 BD: 54 +...: &qdisc_xmit_lock_key#3 FD: 1 BD: 54 +...: &qdisc_xmit_lock_key#4 FD: 1 BD: 54 +...: _xmit_LOOPBACK#2 FD: 35 BD: 53 +.-.: (&peer->timer_send_keepalive) ->&c->lock ->pool_lock#2 ->&list->lock#14 ->tk_core.seq.seqcount ->&n->list_lock ->&____s->seqcount#2 
->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 59 BD: 53 +.-.: (&peer->timer_new_handshake) ->&peer->endpoint_lock FD: 1 BD: 53 ....: (&peer->timer_zero_key_material) FD: 1 BD: 53 +.+.: (work_completion)(&peer->clear_peer_work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->mii_work)->work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->arp_work)->work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->alb_work)->work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->ad_work)->work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->mcast_work)->work) FD: 1 BD: 52 +.+.: (work_completion)(&(&bond->slave_arr_work)->work) FD: 1 BD: 52 ....: (&br->hello_timer) FD: 1 BD: 52 ....: (&br->topology_change_timer) FD: 1 BD: 52 ....: (&br->tcn_timer) FD: 1 BD: 52 ....: (&brmctx->ip4_mc_router_timer) FD: 1 BD: 52 ....: (&brmctx->ip4_other_query.timer) FD: 1 BD: 52 ....: (&brmctx->ip4_other_query.delay_timer) FD: 1 BD: 52 ....: (&brmctx->ip6_mc_router_timer) FD: 1 BD: 52 ....: (&brmctx->ip6_other_query.timer) FD: 1 BD: 52 ....: (&brmctx->ip6_other_query.delay_timer) FD: 1 BD: 52 +.+.: raw_notifier_lock FD: 1 BD: 52 +.+.: bcm_notifier_lock FD: 1 BD: 52 +.+.: isotp_notifier_lock FD: 1 BD: 52 +.+.: (work_completion)(&port->wq) FD: 31 BD: 1 ..-.: &(&fw_cache.work)->timer FD: 1 BD: 7 +.+.: &fw_cache.name_lock FD: 1 BD: 52 +...: &bond->ipsec_lock FD: 1 BD: 52 +...: _xmit_NETROM#2 FD: 26 BD: 1 +.+.: (work_completion)(&(&team->mcast_rejoin.dw)->work) ->&rq->__lock FD: 26 BD: 1 +.+.: (work_completion)(&(&team->notify_peers.dw)->work) ->&rq->__lock FD: 1 BD: 60 ..-.: key#23 FD: 1 BD: 54 +...: &this->info_list_lock FD: 1 BD: 5 +.+.: &pnetids_ndev->lock FD: 30 BD: 5 +.+.: k-sk_lock-AF_INET6/1 ->k-slock-AF_INET6 ->rlock-AF_INET6 ->&list->lock#19 FD: 1 BD: 6 ....: rlock-AF_INET6 FD: 1 BD: 6 ....: &list->lock#19 FD: 7 BD: 56 +...: k-slock-AF_INET6/1 ->&sctp_ep_hashtable[i].lock ->&obj_hash[i].lock ->pool_lock#2 ->k-clock-AF_INET6 ->key#25 FD: 1 BD: 57 +...: &sctp_ep_hashtable[i].lock FD: 75 BD: 3 +.+.: nlk_cb_mutex-NETFILTER ->fs_reclaim ->&c->lock ->pool_lock#2 ->&n->list_lock ->&nf_conntrack_locks[i] ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&____s->seqcount#7 ->&nf_conntrack_locks[i]/1 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#55 FD: 213 BD: 1 +.+.: sk_lock-AF_RDS ->&rq->__lock ->slock-AF_RDS ->&mm->mmap_lock ->rds_trans_sem FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#505 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#247 FD: 34 BD: 2 +.+.: (work_completion)(&nlk->work) ->&obj_hash[i].lock ->pool_lock#2 ->rlock-AF_NETLINK ->&dir->lock ->quarantine_lock FD: 1 BD: 3 +...: slock-AF_PPPOX FD: 1 BD: 1 ....: net_ratelimit_state.lock FD: 67 BD: 3 +.+.: &ctx->uring_lock ->fs_reclaim ->remove_cache_srcu ->pool_lock#2 ->percpu_ref_switch_lock ->&rq->__lock ->&table->hbs[i].lock ->&acct->lock ->&wq->lock ->&ctx->completion_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->&lock->wait_lock ->&c->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->&p->pi_lock FD: 17 BD: 1 +.-.: (&n->timer) ->&n->lock FD: 13 BD: 1 +.+.: &xa->xa_lock#20 ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 5 +.+.: &sn->gssp_lock FD: 1 BD: 8 +.+.: &cd->hash_lock FD: 1 BD: 5 +.+.: xfrm_state_gc_work FD: 1 BD: 5 +...: ip6_fl_lock FD: 1 BD: 5 ....: (&net->ipv6.ip6_fib_timer) FD: 1 BD: 52 ....: 
(&mrt->ipmr_expire_timer) FD: 1 BD: 5 ....: (&ipvs->dest_trash_timer) FD: 1 BD: 5 +.+.: (work_completion)(&(&ipvs->expire_nodest_conn_work)->work) FD: 1 BD: 5 +.+.: (work_completion)(&(&ipvs->est_reload_work)->work) FD: 1 BD: 5 +...: recent_lock FD: 1 BD: 5 +.+.: hashlimit_mutex FD: 1 BD: 5 +.+.: trans_gc_work FD: 1 BD: 5 +.+.: (work_completion)(&(&cnet->ecache.dwork)->work) FD: 1 BD: 5 +.+.: (work_completion)(&net->xfrm.policy_hash_work) FD: 1 BD: 5 +.+.: (work_completion)(&net->xfrm.state_hash_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#168 FD: 1 BD: 5 +...: &pernet->lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#716 ->&rq->__lock FD: 263 BD: 3 +.+.: &sqd->lock ->&sqd->wait ->&rq->__lock ->&p->pi_lock ->&acct->lock ->&wq->lock ->key#26 ->&x->wait#29 ->&hash->wait ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->percpu_counters_lock ->pcpu_lock ->&cfs_rq->removed.lock ->&lock->wait_lock FD: 1 BD: 2 +...: &mux->lock FD: 1 BD: 3 +...: slock-AF_KCM FD: 1 BD: 1 +.+.: &type->s_umount_key#49 FD: 80 BD: 3 +.+.: pfkey_mutex ->&rq->__lock ->crypto_alg_sem ->(kmod_concurrent_max).lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&x->wait#16 ->running_helpers_waitq.lock ->(crypto_chain).rwsem ->&x->wait#20 ->&base->lock ->(&timer.timer) ->&n->list_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->pfkey_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount ->remove_cache_srcu FD: 1 BD: 5 ....: (pm_chain_head).rwsem.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#672 FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg2#295 ->(work_completion)(&peer->transmit_handshake_work) FD: 78 BD: 1 +.-.: (&sk->sk_timer) ->slock-AF_INET#2 FD: 1 BD: 52 +...: nr_neigh_list_lock FD: 1 BD: 52 +...: &bpq_netdev_addr_lock_key FD: 27 BD: 27 ..-.: semaphore->lock#2 ->&p->pi_lock FD: 1 BD: 168 +.+.: dpm_list_mtx.wait_lock FD: 1 BD: 77 ....: key#24 FD: 27 BD: 15 ....: &root->deactivate_waitq ->&p->pi_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#297 FD: 26 BD: 2 +.+.: &iopt->iova_rwsem ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#165 FD: 1 BD: 1 +.+.: &resv_map->lock FD: 6 BD: 1 +.+.: put_task_map-wait-type-override#5 ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock FD: 28 BD: 1 +.+.: sk_lock-AF_IEEE802154 ->slock-AF_IEEE802154 ->rcu_node_0 ->&rq->__lock FD: 97 BD: 3 +.+.: &port_dev->status_lock ->&hub->status_mutex ->hcd->address0_mutex ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&c->lock ->hcd_root_hub_lock ->fs_reclaim ->&dum_hcd->dum->lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&base->lock ->(&timer.timer) ->&n->list_lock ->remove_cache_srcu FD: 1 BD: 3 +.+.: &q->instances_lock FD: 1 BD: 4 +.+.: oom_adj_mutex.wait_lock FD: 1 BD: 3 ....: sync_timeline_list_lock FD: 3 BD: 147 ....: kernfs_pr_cont_lock ->kernfs_rename_lock ->(console_sem).lock FD: 1 BD: 15 +.+.: nbd_index_mutex.wait_lock FD: 1 BD: 3 +.+.: system_transition_mutex.wait_lock FD: 1 BD: 4 +.+.: pfkey_mutex.wait_lock FD: 31 BD: 1 +.-.: (&pool->idle_timer) ->&pool->lock FD: 31 BD: 2 +.+.: (work_completion)(&ns->work) ->sysctl_lock ->&obj_hash[i].lock ->pool_lock#2 ->keyring_name_lock ->&rq->__lock ->proc_inum_ida.xa_lock ->stock_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#5 FD: 31 BD: 1 ..-.: &(&net->ipv6.addr_chk_work)->timer FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1105 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#220 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#580 FD: 1 
BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#244 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#427 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 5 +...: &list->lock#21 FD: 1 BD: 2 ....: &rs->rs_lock FD: 1 BD: 2 +...: raw_lock FD: 31 BD: 1 ..-.: net/wireless/reg.c:236 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#609 ->&rq->__lock FD: 1 BD: 2 ....: &rs->rs_recv_lock FD: 1 BD: 117 ....: key#27 FD: 8 BD: 1 +.+.: put_task_map-wait-type-override#3 ->&obj_hash[i].lock ->percpu_counters_lock ->pcpu_lock ->pool_lock#2 ->stock_lock FD: 1 BD: 103 +.+.: wq_pool_mutex.wait_lock FD: 2 BD: 3 +...: rose_node_list_lock ->rose_neigh_list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#51 ->&rq->__lock FD: 1 BD: 55 ..-.: &list->lock#20 FD: 1 BD: 52 +.-.: x25_list_lock FD: 40 BD: 1 +.-.: (&hcd->rh_timer) ->&dum_hcd->dum->lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#635 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#919 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#900 ->&rq->__lock FD: 1 BD: 4 ....: &hash->wait FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#282 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#173 FD: 1 BD: 4 ....: key#26 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#200 FD: 1 BD: 2 +.+.: loop_validate_mutex.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#33 FD: 1 BD: 2 +...: &xs->map_list_lock FD: 1 BD: 1 +...: link_idr_lock FD: 1 BD: 5 +...: &vvs->tx_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#522 FD: 1 BD: 2 +.+.: &knet->mutex FD: 1 BD: 5 +.+.: &wq->lock FD: 1 BD: 3 ....: rlock-AF_KEY FD: 1 BD: 110 +.+.: rcu_state.exp_wake_mutex.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#860 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#15 FD: 32 BD: 1 +...: put_task_map-wait-type-override#4 ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#423 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1064 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#549 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#737 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#467 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1141 ->&rq->__lock FD: 1 BD: 4 +...: slock-AF_NFC FD: 1 BD: 3 ....: sync_file_list_lock FD: 1 BD: 1 ....: _rs.lock#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#817 FD: 16 BD: 1 -.-.: &ctx->wqh ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&alarm_bases[i].lock ->hrtimer_bases.lock FD: 1 BD: 10 +.+.: &file->master_lookup_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#477 FD: 1 BD: 1 +...: bpf_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#422 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#420 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#394 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#467 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#3 FD: 1 BD: 2 +...: rds_sock_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#13 FD: 31 BD: 1 ..-.: &(&conn->disc_work)->timer FD: 34 BD: 2 +.+.: (work_completion)(&pool->idle_cull_work) ->wq_pool_attach_mutex ->wq_pool_attach_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 ....: &rs->rs_rdma_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#490 ->&rq->__lock FD: 1 BD: 4 ....: &x->wait#29 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#692 ->&rq->__lock FD: 1 BD: 61 ..-.: key#28 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#616 ->&rq->__lock FD: 211 BD: 2 .+.+: sb_writers#12 ->&rq->__lock ->&mm->mmap_lock FD: 1 BD: 5 +...: &vvs->rx_lock FD: 1 BD: 9 +.+.: nf_conntrack_mutex.wait_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1055 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#426 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#929 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#698 FD: 1 BD: 94 +.+.: dev_pm_qos_sysfs_mtx.wait_lock FD: 1 BD: 2 +.+.: &xs->mutex FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#511 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#10 FD: 4 BD: 1 +.+.: put_task_map-wait-type-override#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#264 ->&rq->__lock FD: 28 BD: 2 +.+.: (work_completion)(&vmpr->work) ->&vmpr->sr_lock ->&vmpr->events_lock ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1097 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#115 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#215 FD: 33 BD: 2 +.+.: (work_completion)(&aux->work)#2 ->&rq->__lock ->&aux->poke_mutex ->map_idr_lock ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#273 FD: 3 BD: 4 +.+.: subsys mutex#77 ->&k->k_lock FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg1#299 ->(work_completion)(&peer->transmit_handshake_work) FD: 44 BD: 3 +.+.: usblp_mutex ->&usblp->mut ->&anchor->lock ->(console_sem).lock ->&obj_hash[i].lock ->pool_lock#2 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#291 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#517 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#97 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1048 ->&rq->__lock FD: 1 BD: 2 +...: slock-AF_RDS FD: 1 BD: 18 +...: clock-AF_TIPC FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#25 ->&rq->__lock FD: 2 BD: 4 +.+.: &ctx->completion_lock ->&ctx->timeout_lock FD: 221 BD: 2 +.+.: (work_completion)(&vsock->pkt_work) ->&list->lock#21 ->vsock_table_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->sk_lock-AF_VSOCK ->slock-AF_VSOCK ->&c->lock ->&n->list_lock ->&dir->lock ->stock_lock ->&rq->__lock ->quarantine_lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 28 BD: 26 ..-.: &queue->lock ->pool_lock#2 ->semaphore->lock#2 ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock FD: 1 BD: 199 +.+.: &sma->sems[i].lock FD: 266 BD: 16 +.+.: &nsock->tx_lock ->&u->lock ->&rq->__lock ->sk_lock-AF_INET6 ->slock-AF_INET6 ->sk_lock-AF_TIPC ->slock-AF_TIPC FD: 55 BD: 1 +.-.: (&dum_hcd->timer) ->&dum_hcd->dum->lock ->&dev->lock ->&queue->lock ->&x->wait#27 ->lock#6 ->&x->wait#19 FD: 15 BD: 2 -...: &alarm_bases[i].lock ->hrtimer_bases.lock FD: 1 BD: 119 +.+.: &pa->pa_lock#2 FD: 2 BD: 2 +...: &mux->rx_lock ->rlock-AF_KCM FD: 1 BD: 2 +...: clock-AF_RDS FD: 189 BD: 1 +.+.: (wq_completion)hci1#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#734 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#421 FD: 1 BD: 53 ....: (&lapb->t2timer) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#636 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1070 FD: 1 BD: 5 ....: &anchor->lock FD: 1 BD: 1 
+.+.: (wq_completion)nfc3_nci_rx_wq#256 FD: 1 BD: 1 .+.+: drm_unplug_srcu FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#583 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#920 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#443 ->&rq->__lock FD: 1 BD: 5 ....: &ctx->timeout_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#577 ->&rq->__lock FD: 712 BD: 2 +.+.: (reg_check_chans).work ->rtnl_mutex FD: 12 BD: 1 +.+.: &xa->xa_lock#22 ->stock_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&n->list_lock FD: 95 BD: 4 +.+.: hcd->address0_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->devtree_lock ->&dev->power.lock ->device_state_lock ->&bus->devnum_next_mutex ->mmu_notifier_invalidate_range_start ->ehci_cf_port_reset_rwsem ->(console_sem).lock ->&rq->__lock ->&c->lock ->&dum_hcd->dum->lock ->&x->wait#19 ->&base->lock ->(&timer.timer) ->hcd_urb_list_lock ->&____s->seqcount ->quirk_mutex ->&n->list_lock ->remove_cache_srcu ->pool_lock ->hcd_root_hub_lock ->&queue->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&____s->seqcount#2 FD: 1 BD: 102 +.+.: freezer_mutex.wait_lock FD: 1 BD: 2 +...: clock-AF_XDP FD: 1 BD: 1 ....: driver_id_numbers.xa_lock FD: 5 BD: 2 +.+.: (ima_keys_delayed_work).work ->ima_keys_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 84 BD: 1 +.+.: &net->xfrm.xfrm_cfg_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&net->xfrm.xfrm_policy_lock ->rlock-AF_KEY ->&c->lock ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->&pfk->dump_lock ->pfkey_mutex ->pfkey_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#381 FD: 1 BD: 2 ....: rds_cong_monitor_lock FD: 27 BD: 3 ....: &x->wait#30 ->&p->pi_lock FD: 1 BD: 14 +.+.: udc_lock.wait_lock FD: 27 BD: 1 ++++: &iopt->domains_rwsem ->&iopt->iova_rwsem FD: 1 BD: 52 +.-.: x25_forward_list_lock FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg0#296 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 5 +.+.: quirk_mutex FD: 26 BD: 2 .+.+: rds_ib_devices_lock ->&rq->__lock FD: 1 BD: 2810 ....: cid_lock FD: 31 BD: 1 ..-.: security/integrity/ima/ima_queue_keys.c:35 FD: 1 BD: 2 +...: slock-AF_IEEE802154 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#50 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#92 FD: 26 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#198 FD: 1 BD: 3 ....: &xa->xa_lock#21 FD: 2 BD: 13 ....: &new->fa_lock ->&f->f_owner.lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#496 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#557 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#3 FD: 27 BD: 3 ..-.: &x->wait#28 ->&p->pi_lock FD: 33 BD: 1 +.-.: (&cfile->notify_timer) ->cgroup_file_kn_lock FD: 1 BD: 105 +.+.: wq_pool_attach_mutex.wait_lock FD: 85 BD: 5 .+.+: ehci_cf_port_reset_rwsem ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&c->lock ->hcd_root_hub_lock ->fs_reclaim ->&dum_hcd->dum->lock ->&queue->lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&base->lock ->(&timer.timer) ->&hub->status_mutex ->device_state_lock 
->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&n->list_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu ->pool_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 86 BD: 2 +.+.: (work_completion)(&hcd->wakeup_work) FD: 26 BD: 2 +.+.: &net->ipv4.ra_mutex ->&rq->__lock FD: 1 BD: 4 ....: &sqd->wait FD: 1 BD: 2 +.+.: cancel_lock FD: 1 BD: 5 +.+.: &acct->lock FD: 273 BD: 2 +.+.: (work_completion)(&ctx->exit_work) ->&ctx->uring_lock ->&ctx->completion_lock ->&sqd->lock ->&x->wait#28 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&x->wait#30 ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&zone->lock ->percpu_ref_switch_lock ->&xa->xa_lock#21 ->&cfs_rq->removed.lock ->pool_lock ->quarantine_lock ->&lock->wait_lock ->&p->pi_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&x->wait#2 ->&meta->lock ->kfence_freelist_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rnp->exp_lock ->rcu_state.exp_mutex FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#32 FD: 27 BD: 2 ..-.: &x->wait#27 ->&p->pi_lock FD: 1 BD: 1 ....: _rs.lock FD: 13 BD: 198 +...: prog_idr_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#782 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#414 FD: 26 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#677 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#521 ->&rq->__lock FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg1#306 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#585 FD: 39 BD: 2 +.+.: sk_lock-AF_CAIF ->slock-AF_CAIF ->&rq->__lock ->&obj_hash[i].lock ->&this->info_list_lock ->(console_sem).lock ->&x->wait#2 ->&ei->socket.wq.wait ->clock-AF_CAIF ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 189 BD: 1 +.+.: (wq_completion)hci5#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#336 FD: 82 BD: 1 +.+.: memcg_oom_lock#2 ->memcg_oom_lock ->oom_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#474 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#251 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#30 FD: 32 BD: 1 +.-.: (&tsk->oom_reaper_timer) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#200 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#660 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#705 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#423 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#884 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#524 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#894 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#897 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#3 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc14_nci_cmd_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#680 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#382 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#4 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg2#297 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#567 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#568 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#212 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#169 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#642 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg1#298 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#677 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#23 FD: 41 BD: 103 +.+.: &cache->alloc_lock ->swap_avail_lock ->&p->lock#2 FD: 38 BD: 1 .+.+: &type->s_umount_key#50 ->&lru->node[i].lock ->&dentry->d_lock FD: 1 BD: 104 +.+.: &vmpr->sr_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1139 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#167 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#95 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg0#304 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg0#303 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 545 +.+.: mmlist_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#753 FD: 32 BD: 1 +.+.: percpu_charge_mutex ->stock_lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#686 ->&rq->__lock FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg0#295 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#199 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#3 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#915 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#43 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#403 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#52 FD: 139 BD: 3 +.+.: minor_rwsem#2 ->fs_reclaim ->remove_cache_srcu ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&c->lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->&k->k_lock ->subsys mutex#77 FD: 39 BD: 1 +.+.: (wq_completion)bond0#165 ->(work_completion)(&(&slave->notify_work)->work) FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#32 ->&rq->__lock FD: 26 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#757 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#544 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#336 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#304 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#513 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#461 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#516 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#889 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#587 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#174 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#821 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#483 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#258 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1050 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#859 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#412 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#521 FD: 1 BD: 5 ....: &usblp->wwait FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#195 FD: 1 BD: 52 +...: x25_route_list_lock FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#150 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg1#308 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#18 FD: 42 BD: 1 .+.+: sb_writers#13 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->mount_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#653 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#526 ->&rq->__lock FD: 1 BD: 56 +.+.: (wq_completion)phy340 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#3 FD: 26 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#863 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#38 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#612 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#606 ->&rq->__lock FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg0#150 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#333 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#422 FD: 1 BD: 1 ....: _rs.lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#508 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#775 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#303 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#202 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#523 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1116 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#278 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#75 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#266 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#400 ->&rq->__lock FD: 1 BD: 14 ....: &f->f_owner.lock FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#149 ->(work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 5 +.+.: (wq_completion)tipc_rcv#3 FD: 52 BD: 2 +.+.: sk_lock-AF_PPPOX ->slock-AF_PPPOX ->&pn->hash_lock ->clock-AF_PPPOX ->chan_lock ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->&rnp->exp_wq[2] ->&rnp->exp_wq[0] ->&rnp->exp_wq[1] ->&cfs_rq->removed.lock ->pool_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#733 ->&rq->__lock FD: 2 BD: 3 +.+.: &local->sdp_lock ->&local->sockets.lock FD: 1 BD: 3 +...: clock-AF_PPPOX FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#666 ->&rq->__lock FD: 1 BD: 53 ....: (inetaddr_chain).rwsem.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1104 ->&rq->__lock FD: 4 BD: 4 +.+.: &usblp->mut ->&usblp->wwait ->&usblp->rwait ->&anchor->lock FD: 31 BD: 17 +.+.: sk_lock-AF_TIPC ->&rq->__lock ->slock-AF_TIPC ->&obj_hash[i].lock ->clock-AF_TIPC FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#607 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#384 FD: 3 BD: 2 +.+.: sk_lock-AF_KCM ->slock-AF_KCM ->clock-AF_KCM FD: 1 BD: 3 ....: rlock-AF_KCM FD: 1 BD: 2 +.+.: (work_completion)(&kcm->tx_work) FD: 1 BD: 3 +...: clock-AF_KCM FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#18 FD: 26 BD: 56 +.+.: (wq_completion)phy337 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1142 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1135 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#310 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1143 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#16 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1130 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#7 FD: 26 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#10 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1131 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1050 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#216 FD: 1 BD: 1 ....: &pgdat->reclaim_wait[i] FD: 26 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1062 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#53 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#627 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1061 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#881 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#2 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#904 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1043 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#619 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#539 FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg0#315 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1083 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#961 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#486 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#332 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#747 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#614 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#722 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#606 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#158 ->&rq->__lock FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg0#313 ->(work_completion)(&peer->transmit_handshake_work) FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg1#305 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1090 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#420 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1119 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#783 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#662 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1106 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1108 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#203 FD: 1 BD: 1 ....: oom_rs.lock FD: 1 BD: 104 +.+.: memcg_oom_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#172 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#513 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#69 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#249 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#492 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#444 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#285 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#965 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#452 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#199 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#422 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#503 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#806 FD: 1 BD: 1 ....: oom_victims_wait.lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#475 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#851 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#423 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#207 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#454 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#271 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#538 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1098 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#208 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1072 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#44 FD: 9 BD: 2 +.+.: (work_completion)(&mm->async_put_work) ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->pool_lock FD: 1 BD: 3 +.+.: binder_procs_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#641 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#228 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#377 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#519 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#383 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#76 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#168 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#36 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#411 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#23 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#925 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#674 
->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#385 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#938 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#389 ->&rq->__lock FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg2#284 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#177 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#174 FD: 26 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#666 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1074 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#737 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#81 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#211 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#958 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#383 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#377 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#51 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#383 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#666 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#36 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#165 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#168 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#169 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#167 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#169 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#166 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#166 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#380 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#164 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#379 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#377 FD: 155 BD: 53 +.+.: team->team_lock_key#159 ->fs_reclaim 
->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&rq->__lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->lweventlist_lock ->(console_sem).lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#21 FD: 39 BD: 1 +.+.: (wq_completion)bond0#163 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#163 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#377 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#376 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#162 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#376 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#375 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#162 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#161 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#375 FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#17 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#17 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1128 FD: 26 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#4 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#62 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#673 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#237 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1133 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#674 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#238 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1146 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1144 FD: 1 BD: 3 +.+.: chan_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1145 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#683 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#678 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1141 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1134 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1137 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#681 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1134 ->&rq->__lock FD: 1 BD: 5 +.+.: (wq_completion)tipc_send#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1107 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1053 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1053 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#633 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#7 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#7 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1062 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#54 ->&rq->__lock FD: 1 
BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1061 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#626 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#629 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1055 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1025 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1014 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1006 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#197 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#982 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#969 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#963 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#163 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#888 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#888 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#520 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#892 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#894 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#897 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#871 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#890 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#871 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#868 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#490 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#480 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#833 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#835 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#828 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#820 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#468 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#466 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#464 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#458 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#810 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#806 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#803 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#434 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#434 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#645 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#511 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1067 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#108 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#513 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#516 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#509 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#260 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#504 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#501 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#498 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#251 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#494 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#492 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#478 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#245 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#480 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#472 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#239 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1064 
->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#465 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#462 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#462 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#460 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#232 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#228 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#453 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#227 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#221 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#226 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#220 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#441 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#437 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#438 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#53 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1070 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#223 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#97 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#634 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#20 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#52 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#54 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#420 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#421 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#198 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#202 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#424 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#202 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#204 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#429 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#207 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#432 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#433 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#433 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#430 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#425 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#205 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#419 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#199 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#203 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#486 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#424 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#425 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#201 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#425 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#428 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#203 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#427 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#200 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#426 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#164 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#167 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#384 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#385 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#176 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#386 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#387 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#390 ->&rq->__lock FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg1#307 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); 
({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg2#304 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg0#316 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg0#160 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg2#305 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#393 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#395 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#398 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#404 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#188 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#195 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#416 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#409 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#197 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#196 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#410 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#406 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#407 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#402 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#396 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#397 FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg2#151 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg1#154 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#391 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#183 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#179 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#4 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#164 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#379 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#165 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1124 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1118 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#656 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#989 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#196 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#188 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#961 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#884 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#857 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#842 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#477 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#463 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#793 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#795 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#439 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#762 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#740 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#737 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#688 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#381 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#679 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#371 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#684 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#687 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#395 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#710 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#703 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#716 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#681 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#685 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#679 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#678 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#115 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#689 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#696 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#707 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#722 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#122 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#725 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#735 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#733 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#742 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#742 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#118 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1075 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#419 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#661 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#668 ->&rq->__lock FD: 39 BD: 1 +.+.: (wq_completion)bond0#168 ->(work_completion)(&(&slave->notify_work)->work) FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#204 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#380 FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg0#307 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 550 +.+.: &((cluster_info + ci)->lock)#2 FD: 58 BD: 1 +.+.: shmem_swaplist_mutex ->&xa->xa_lock#23 
->&info->lock ->&sb->s_type->i_lock_key ->&((cluster_info + ci)->lock)#2 ->&xa->xa_lock#9 FD: 13 BD: 550 ....: &xa->xa_lock#23 ->&c->lock ->pool_lock#2 ->&ctrl->lock#2 ->key#29 ->&rtpn->lock ->&obj_hash[i].lock FD: 32 BD: 549 +.+.: &tree->lock ->&pool->lock#3 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->&____s->seqcount FD: 32 BD: 103 +.+.: &acomp_ctx->mutex ->scomp_scratch.lock ->&____s->seqcount#2 ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&pool->lock#3 ->&zspage->lock ->&rq->__lock ->&lock->wait_lock FD: 1 BD: 104 +.+.: scomp_scratch.lock FD: 8 BD: 551 +.+.: &pool->lock#3 ->&zspage->lock ->&obj_hash[i].lock ->pool_lock#2 FD: 2 BD: 552 .+.+: &zspage->lock ->lock#10 FD: 1 BD: 553 +.+.: lock#10 FD: 1 BD: 551 ....: &ctrl->lock#2 FD: 3 BD: 566 -.-.: lock#11 ->&lruvec->lru_lock FD: 1 BD: 551 ....: key#29 FD: 1 BD: 551 ..-.: &rtpn->lock FD: 3 BD: 2 +.+.: (work_completion)(&({ do { const void *__vpp_verify = (typeof((&memcg_stock) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&memcg_stock))) *)((&memcg_stock))); (typeof((typeof(*((&memcg_stock))) *)((&memcg_stock)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->stock_lock FD: 41 BD: 545 +.+.: &cache->free_lock ->&p->lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1101 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#601 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#70 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#4 FD: 39 BD: 5 +.+.: (wq_completion)bond0#169 ->(work_completion)(&(&slave->notify_work)->work) FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg2#292 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 200 BD: 1 +.+.: (wq_completion)hci0#3 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1138 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#287 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#179 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#400 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#146 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1088 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#3 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc19_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#4 FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg2#296 ->&rq->__lock ->(work_completion)(&peer->transmit_handshake_work) FD: 154 BD: 53 +.+.: team->team_lock_key#165 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&n->list_lock ->lweventlist_lock ->(console_sem).lock ->quarantine_lock ->remove_cache_srcu ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#4 FD: 1 BD: 3 +.+.: binder_dead_nodes_lock FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#156 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 717 BD: 13 +.+.: &devlink->lock_key#159 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->&n->list_lock ->&rq->__lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->&____s->seqcount ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#4 FD: 26 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#14 FD: 1 
BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#38 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#39 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#174 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#173 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#176 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#324 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#4 ->&rq->__lock FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg0#308 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 26 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#3 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#147 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#173 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#172 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#172 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#37 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#379 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#380 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#378 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#163 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#166 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg1#290 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg2#285 ->(work_completion)(&peer->transmit_handshake_work) FD: 39 BD: 1 +.+.: (wq_completion)bond0#159 ->(work_completion)(&(&slave->notify_work)->work) FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg1#300 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#382 ->&rq->__lock FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg1#289 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#378 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#13 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#14 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#18 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#25 FD: 26 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#14 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#15 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#16 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1127 FD: 26 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#4 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#11 ->&rq->__lock FD: 155 BD: 53 +.+.: team->team_lock_key#172 ->&rq->__lock ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&c->lock ->&n->list_lock ->&dir->lock#2 ->input_pool.lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->lweventlist_lock ->&____s->seqcount#2 ->&____s->seqcount ->(console_sem).lock ->quarantine_lock ->remove_cache_srcu FD: 26 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#11 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#13 FD: 39 BD: 1 +.+.: (wq_completion)bond0#172 ->&rq->__lock ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#662 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1126 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#230 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1122 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#663 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 200 BD: 1 +.+.: (wq_completion)hci0#5 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1121 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1121 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1120 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1117 FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#24 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc44_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq FD: 26 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 
+.+.: (wq_completion)nfc28_nci_rx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#10 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1111 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#23 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#21 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1112 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1110 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1109 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1108 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#662 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1104 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#659 ->&rq->__lock FD: 26 BD: 56 +.+.: (wq_completion)phy342 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1103 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1103 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1102 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#657 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1099 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1098 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1097 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1095 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1096 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1092 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#10 FD: 155 BD: 53 +.+.: team->team_lock_key#171 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&rq->__lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->&c->lock ->&n->list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&____s->seqcount#2 ->&____s->seqcount ->lweventlist_lock ->(console_sem).lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1091 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#653 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1091 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1090 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1089 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1089 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#227 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_rx_wq#1087 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1087 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1086 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1085 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1082 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1084 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#645 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#644 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1079 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1078 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1073 ->&rq->__lock FD: 26 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1071 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1071 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#637 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#641 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1067 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#638 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#222 FD: 26 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#12 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#20 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#635 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#13 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#13 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1045 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1044 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1044 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#619 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#616 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#213 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1041 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1040 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1038 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#614 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1038 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1036 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#210 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#209 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#50 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#209 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1034 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#207 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#613 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#609 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1032 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1032 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#48 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1030 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1030 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1029 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1028 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1027 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1026 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1026 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1025 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1023 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#607 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#603 FD: 26 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#601 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#602 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1022 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#203 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1021 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1022 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#201 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1020 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1021 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1021 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#200 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#598 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#604 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1020 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1018 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1019 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1019 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1017 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1018 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1015 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#600 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1012 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#46 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1010 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#599 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#597 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1008 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1007 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#596 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#589 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1006 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#588 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#594 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#590 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1003 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#593 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#592 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1001 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#591 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#590 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#999 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#999 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#998 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#997 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#582 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#995 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#995 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#994 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#585 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#579 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#986 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#10 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#985 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#577 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#983 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#983 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#981 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#981 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#576 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#978 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#569 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#977 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#573 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#43 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#973 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#973 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#971 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#969 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#186 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#966 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#568 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#567 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#41 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#184 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#185 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#560 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#958 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#957 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#956 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#183 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#954 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#559 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#182 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#951 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#949 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#555 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#40 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#179 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#553 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#944 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#547 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#549 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#942 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#542 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#178 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#175 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#547 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#542 FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#545 FD: 26 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#4 FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#5 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#4 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc12_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#5 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#934 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#933 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#932 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#931 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#929 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#927 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#541 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#927 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#927 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#540 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#925 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#924 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#538 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#921 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#537 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#919 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#917 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#917 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#911 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#908 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#168 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#906 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#530 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#907 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#529 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#526 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#899 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#164 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#882 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#877 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#513 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#878 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#876 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#875 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#874 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#158 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#870 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#504 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#503 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#862 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#862 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#857 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#856 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#854 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#491 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#853 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#486 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#850 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#849 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#156 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#847 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#488 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#844 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#845 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#843 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#843 
FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#153 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#484 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#840 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#838 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#838 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#835 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#831 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#829 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#827 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#826 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#824 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#824 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#146 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#817 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#812 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#462 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#460 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#809 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#453 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#805 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#803 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#456 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#801 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#801 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#797 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#797 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#795 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#447 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#792 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#450 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#790 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#448 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#787 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#788 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#786 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#447 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#784 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#785 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#782 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#781 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#779 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#778 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#777 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#560 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#563 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#437 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#772 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#431 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#761 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#761 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#763 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#764 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#767 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#767 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#436 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#768 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#758 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#759 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#755 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#757 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#744 ->&rq->__lock FD: 26 BD: 
2 +.+.: (wq_completion)nfc2_nci_cmd_wq#746 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#420 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#749 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#749 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#751 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#424 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#367 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#755 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#671 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#669 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#361 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#666 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#665 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#664 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#362 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#662 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#660 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#660 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#659 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#358 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#110 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#656 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#655 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#653 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#652 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#111 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#650 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#649 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#648 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#352 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#646 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#644 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#644 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#640 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#346 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#638 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#638 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#634 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#633 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#632 ->&rq->__lock FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg0#317 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#630 FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg1#309 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#628 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#626 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#627 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#626 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#625 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#335 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#624 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#623 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#621 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#619 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#326 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#609 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#325 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#607 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#606 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#605 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#604 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#603 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#100 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#599 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#320 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#319 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#594 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#595 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#98 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#594 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#591 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#589 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#315 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#588 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#588 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#587 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#585 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#583 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#581 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#579 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#577 FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#575 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#307 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#302 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#570 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#569 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#568 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#305 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#566 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#564 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#563 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#297 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#562 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#300 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#561 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#561 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#298 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#559 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#559 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#558 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#557 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#90 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#295 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#555 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#554 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#288 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#552 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#293 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#549 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#547 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#287 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#545 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#543 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#542 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#541 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#540 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#538 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#87 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#286 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#533 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#534 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#277 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#532 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#531 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#280 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#530 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#277 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#527 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#527 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#84 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#274 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#261 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#254 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#507 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#77 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#78 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#77 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#76 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#256 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#25 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#255 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#254 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#487 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#243 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#67 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#482 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#239 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#243 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#475 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#64 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#469 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#469 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#23 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#236 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#228 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#233 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#458 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#60 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#455 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#454 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#59 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#449 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#449 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#445 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#444 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#443 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#442 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#439 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#210 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#435 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#53 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#206 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#426 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#202 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#418 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#418 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#196 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#416 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#376 FD: 26 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#16 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1126 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1118 ->&rq->__lock FD: 26 BD: 2 +.+.: 
(wq_completion)nfc36_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#664 FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#606 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1008 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#992 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#992 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#980 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#976 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#971 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#972 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#569 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#564 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#951 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#900 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#895 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#885 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#497 ->&rq->__lock FD: 189 BD: 12 +.+.: (wq_completion)hci3#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#851 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#477 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#837 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#821 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#817 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#798 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#798 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#185 FD: 1 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#445 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#779 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#769 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#689 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#429 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#743 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#740 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#407 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#722 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#719 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#691 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#695 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#700 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#700 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#703 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#390 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#397 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#710 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#394 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#718 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#715 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#717 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#729 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#406 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#731 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#735 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#618 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#743 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#96 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#314 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#175 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#330 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#258 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#629 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#617 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#104 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#328 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#613 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#611 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#720 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#395 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#145 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#381 FD: 189 BD: 12 +.+.: (wq_completion)hci0#4 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#378 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#677 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#672 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#695 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#707 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#727 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#751 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#746 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#153 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#159 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#3 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg2#302 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#381 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#207 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#129 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#3 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#150 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#791 FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg2#303 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#472 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#823 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#836 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#832 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#489 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#165 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#529 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#421 FD: 717 BD: 13 +.+.: &devlink->lock_key#165 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&n->list_lock ->&rq->__lock ->&xa->xa_lock#19 ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#56 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#456 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#238 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#455 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#657 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#385 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1096 FD: 200 BD: 1 +.+.: (wq_completion)hci5 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#937 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#584 ->&rq->__lock FD: 1 BD: 2 +...: rlock-AF_CAIF FD: 717 BD: 13 +.+.: &devlink->lock_key#163 ->crngs.lock ->fs_reclaim ->&c->lock ->devlinks.xa_lock ->&xa->xa_lock#19 ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->&(&fn_net->fib_chain)->lock ->rtnl_mutex.wait_lock ->&p->pi_lock ->&n->list_lock ->stack_depot_init_mutex ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 26 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#408 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#623 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#551 FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#142 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
->(work_completion)(&peer->transmit_packet_work) FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#7 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1087 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#866 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#914 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#570 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#226 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#357 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#650 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#640 ->&rq->__lock FD: 200 BD: 1 +.+.: (wq_completion)hci3 ->&rq->__lock ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#503 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#322 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#633 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#382 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#759 ->&rq->__lock FD: 155 BD: 53 +.+.: team->team_lock_key#163 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->lweventlist_lock ->(console_sem).lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1082 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1100 ->&rq->__lock FD: 1 BD: 3 +...: slock-AF_CAIF FD: 1 BD: 3 +...: clock-AF_CAIF FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#648 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1085 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#674 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#369 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1080 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#601 ->&rq->__lock FD: 200 BD: 1 +.+.: (wq_completion)hci1 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#38 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#337 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#615 FD: 39 BD: 1 +.+.: (wq_completion)bond0#171 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#581 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#3 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#693 ->&rq->__lock FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#154 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#565 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#669 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#181 FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg1#297 ->(work_completion)(&peer->transmit_handshake_work) FD: 2 BD: 2 +.+.: &pfk->dump_lock 
->&net->xfrm.xfrm_policy_lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#556 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#556 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#391 FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#953 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#501 FD: 200 BD: 1 +.+.: (wq_completion)hci4 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#386 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#50 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#424 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#378 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#712 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#400 FD: 39 BD: 5 +.+.: (wq_completion)bond0#170 ->(work_completion)(&(&slave->notify_work)->work) ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#201 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#327 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#175 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#178 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#384 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#175 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#176 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#177 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#40 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#40 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#386 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#385 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#180 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#177 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#178 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#40 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#387 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#388 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#388 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#387 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#389 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#181 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#388 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#178 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#179 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#390 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#389 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#42 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#390 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#42 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc5_nci_rx_wq#17 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#17 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#182 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#179 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#180 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#392 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#391 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#180 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#181 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#43 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#42 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#393 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#184 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#181 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#182 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#44 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#43 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#394 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#393 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#395 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#394 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#185 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#182 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#183 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#396 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#395 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#397 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#396 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#186 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#183 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#184 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#397 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#187 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#184 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#185 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#188 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#399 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#399 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#398 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#189 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#186 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#186 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#399 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#401 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#401 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#400 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#402 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#401 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#190 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#187 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#187 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#403 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#402 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#191 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#404 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#403 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#188 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#192 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#404 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#189 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#189 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#45 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#45 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#44 
FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#406 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#407 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#406 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#193 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#190 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#190 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#408 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#407 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#409 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#408 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#194 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#191 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#191 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#410 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#409 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#411 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#410 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#195 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#192 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#192 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#412 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#411 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#46 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#193 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#193 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#413 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#46 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#413 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#45 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#412 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#414 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#413 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#198 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#194 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#194 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#47 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#47 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#46 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#48 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#48 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#47 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#415 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#415 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#414 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#415 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#196 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#417 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#417 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#416 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#49 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#49 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#48 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#417 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#50 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#49 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#419 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#419 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#418 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#201 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#197 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#197 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#52 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#51 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#50 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#428 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#209 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#205 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#427 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#204 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#54 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#53 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#429 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#428 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#210 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#205 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#430 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#429 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#211 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#208 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#207 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#431 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#213 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#431 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#430 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#209 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#208 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#55 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#54 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#21 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#432 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#432 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#431 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#214 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#210 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#209 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#434 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#434 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#433 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#215 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#211 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#435 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#434 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#216 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#212 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#436 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#436 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#435 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#217 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#213 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#212 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#57 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#56 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#55 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#437 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#436 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#218 FD: 1 BD: 1 
+.+.: (wq_completion)nfc3_nci_rx_wq#214 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#213 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#58 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#57 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#56 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#438 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#437 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#439 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#438 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#219 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#215 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#214 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#440 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#440 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#439 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#216 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#215 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#441 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#440 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#442 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#441 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#221 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#217 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#216 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#443 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#442 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#222 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#218 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#217 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#223 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#219 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#218 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#59 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#58 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#57 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#445 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#444 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#224 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#446 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#446 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#445 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#225 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#219 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#447 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#447 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#446 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#222 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#220 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#448 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#448 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#447 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#448 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#223 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#221 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#450 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#450 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#449 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#58 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#451 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#451 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#450 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#452 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#451 FD: 26 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#224 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#222 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#61 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#453 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#452 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#60 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#59 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#229 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#225 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#223 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#453 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#230 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#62 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#226 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#224 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#61 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#60 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#454 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#231 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#227 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#225 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#456 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#455 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#228 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#226 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#457 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#457 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#456 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#458 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#457 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#458 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#460 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#459 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#229 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#227 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#461 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#461 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#460 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#234 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#230 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#461 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#235 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#231 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#229 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#63 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#463 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#463 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#462 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#62 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#61 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#232 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#230 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#464 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#464 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#463 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#465 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#464 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#466 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#466 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#465 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#466 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#237 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#64 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc4_nci_rx_wq#63 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#233 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#62 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#231 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#22 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#22 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#468 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#468 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#467 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#468 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#470 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#470 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#469 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#234 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#232 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#65 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#63 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#471 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#471 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#470 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#235 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#233 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#472 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#471 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#240 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#236 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#234 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#473 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#473 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#472 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#241 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#237 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#235 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#473 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#242 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#238 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#236 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#475 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#474 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#476 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#237 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#475 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#66 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#65 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#64 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#477 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#478 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#477 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#479 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#479 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#478 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#244 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#67 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#66 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#65 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#240 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#238 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#480 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#479 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#241 ->&rq->__lock FD: 26 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#239 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#481 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#481 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#480 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#68 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#66 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#482 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#481 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#246 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#242 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#483 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#483 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#240 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#482 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#69 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#68 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#24 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#67 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#23 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#484 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#483 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#247 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#241 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#485 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#485 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#484 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#70 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#248 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#69 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#68 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#242 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#486 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#485 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#487 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#486 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#245 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#243 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#488 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#488 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#487 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#71 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#70 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#489 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#489 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#488 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#490 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#489 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#250 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#246 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#72 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#244 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#71 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#245 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#491 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#491 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#490 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#73 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#72 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#71 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#491 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#493 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#493 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#492 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#252 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#248 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#246 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#494 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#493 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#253 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#249 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#247 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#494 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#496 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#250 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#248 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#497 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#74 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#497 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#73 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#72 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#496 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#498 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#497 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#249 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#498 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#75 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#74 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#73 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#24 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#24 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#501 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#252 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#250 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#502 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#502 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#501 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#74 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#503 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#502 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#75 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#504 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#503 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#257 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#253 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#251 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#26 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#25 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#25 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#505 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#504 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#254 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#252 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#76 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#506 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#506 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#505 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#259 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#255 FD: 26 BD: 
1 +.+.: (wq_completion)nfc3_nci_tx_wq#253 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#26 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#26 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#507 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#506 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#79 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#78 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#77 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#80 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#79 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#78 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#257 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#255 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#508 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#508 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#507 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#80 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#79 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#262 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#256 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#263 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#259 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#257 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#509 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#508 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#264 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#260 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#258 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#510 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#510 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#509 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#510 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#265 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#261 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#259 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#82 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#81 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#80 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#512 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#512 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#511 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#512 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#262 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#260 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#514 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#514 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#267 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#263 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#515 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#261 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#515 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#514 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#516 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#515 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#268 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#262 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#517 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#517 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#516 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#518 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#518 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#269 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#265 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#263 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#270 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#519 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#518 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#266 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#264 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#267 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#265 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#83 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#82 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#81 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#520 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#520 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#519 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#520 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#272 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#268 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#266 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#522 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#521 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#84 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#83 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#82 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#523 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#522 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#523 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#269 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#267 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#525 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#525 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#270 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#268 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#526 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#525 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#275 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#271 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#85 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#269 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#83 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#28 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#27 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#276 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#272 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#270 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#526 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#273 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#271 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#527 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#86 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#85 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#84 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#274 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#272 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#529 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#529 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#530 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#529 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#279 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#275 FD: 26 BD: 
1 +.+.: (wq_completion)nfc3_nci_tx_wq#273 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#276 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#274 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#531 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#530 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#87 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#86 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#85 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#532 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#531 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#281 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#275 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#278 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#276 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#533 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#533 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#532 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#88 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#87 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#86 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#283 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#279 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#277 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#534 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#284 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#280 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#278 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#535 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#281 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#279 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#535 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#534 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#282 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#536 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#280 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#536 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#535 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#89 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#88 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#537 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#537 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#536 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#283 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#281 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#537 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#288 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#284 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#282 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#539 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#539 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#538 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#540 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#539 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#541 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#540 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#289 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#285 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#283 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#542 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#541 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#90 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc4_nci_rx_wq#89 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#88 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#29 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#28 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#28 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#543 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#543 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#542 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#544 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#545 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#544 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#290 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#286 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#284 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#546 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#546 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#545 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#285 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#547 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#546 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#292 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#288 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#548 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#286 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#548 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#547 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#289 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#548 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#287 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#91 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#90 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#89 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#549 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#551 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#552 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#551 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#294 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#290 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#553 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#553 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#552 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#554 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#553 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#555 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#554 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#291 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#289 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#296 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#556 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#292 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#290 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#92 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#91 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#555 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#297 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#556 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#93 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#91 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#293 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#291 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#558 
->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#557 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#558 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#294 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#292 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#560 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#559 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#299 FD: 200 BD: 1 +.+.: (wq_completion)hci2 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 189 BD: 12 +.+.: (wq_completion)hci2#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#295 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#293 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#560 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#296 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#294 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#94 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#93 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#92 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#562 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#561 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#301 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#295 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#562 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#302 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#298 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#296 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#303 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#564 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#299 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#563 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#565 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#564 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#300 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#298 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#566 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#565 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#567 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#566 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#567 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#301 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#299 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#569 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#568 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#569 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#571 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#571 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#570 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#572 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#572 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#571 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#306 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#300 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#573 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#573 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#572 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#303 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#301 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#574 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#574 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#573 FD: 26 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#575 ->&rq->__lock FD: 1 BD: 5 +.+.: (wq_completion)tipc_rcv FD: 1 BD: 5 +.+.: (wq_completion)tipc_send FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#308 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#574 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#304 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#302 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#576 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#576 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#575 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#577 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#576 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#578 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#578 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#577 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#579 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#578 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#309 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#305 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#580 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#580 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#579 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#310 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#306 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#304 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#311 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#582 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#582 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#307 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#581 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#305 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#582 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#584 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#312 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#583 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#308 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#306 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#95 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#94 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#93 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#584 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#313 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#309 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#307 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#94 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#586 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#586 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#585 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#96 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#95 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#586 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#310 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#30 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#29 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#308 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#29 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#587 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#311 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#309 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1100 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#589 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#588 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#316 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#312 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#590 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#590 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#589 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#591 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#590 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#592 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#317 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#592 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#591 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#313 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#311 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#96 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#593 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#593 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#592 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#593 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#318 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#314 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#312 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#595 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#99 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#98 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#97 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#594 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#596 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#596 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#595 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#597 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#597 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#596 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#315 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#313 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#598 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#598 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#597 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#316 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#314 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#599 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#598 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#600 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#600 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#599 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#321 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#317 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#99 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#315 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#98 ->&rq->__lock FD: 1 BD: 56 +.+.: (wq_completion)phy339 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#600 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#602 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#602 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#601 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#318 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#316 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#603 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#602 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#604 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#603 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#605 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#604 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#323 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#319 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#317 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#605 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#324 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#101 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#320 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#318 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#100 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#99 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#321 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#319 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#608 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#608 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#607 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#102 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#101 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#100 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#608 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#610 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#610 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#609 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#322 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#320 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#611 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#610 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#612 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#611 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#327 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#323 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#321 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#103 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#102 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#101 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#322 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#613 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#612 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#614 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#613 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#615 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#614 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#616 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#615 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#329 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#325 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#323 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#326 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#324 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#617 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#616 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#103 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#102 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#618 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#617 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#331 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#327 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#325 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#619 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#619 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#618 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#328 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#326 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#620 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#620 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#621 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#105 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#104 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#103 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#620 FD: 1 
BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#333 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#329 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#622 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#622 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#621 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#622 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#334 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#330 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#328 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#624 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#623 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#331 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#329 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#625 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#624 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#332 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#330 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#331 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#106 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#105 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#104 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#338 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#334 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#626 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#625 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#332 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#627 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#339 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#335 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#333 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#628 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#627 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#334 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#629 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#628 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#341 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#337 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#335 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#342 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#630 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#338 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#336 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#629 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#631 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#631 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#630 FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg0#318 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg2#306 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 104 BD: 53 +.+.: (wq_completion)wg-kex-wg1#310 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg1#155 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg2#152 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 105 BD: 53 +.+.: (wq_completion)wg-crypt-wg0#161 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 68 BD: 53 +.+.: (wq_completion)wg-kex-wg2#307 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#632 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#631 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#343 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#339 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#337 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#107 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#106 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#105 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#632 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#634 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#633 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#344 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#635 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#634 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#338 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#107 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#106 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#636 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#636 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#635 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#637 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#637 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#636 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#637 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#345 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#341 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#339 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#639 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#639 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#638 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#109 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#108 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#107 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#342 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#639 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#347 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#343 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#341 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#641 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#640 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#348 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#344 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#342 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#642 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#641 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#349 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#345 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#343 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#643 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#643 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#642 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#350 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#346 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#344 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#643 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#645 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#644 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#646 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#645 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#351 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#347 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#345 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#647 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#647 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#646 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#348 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#346 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#647 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#353 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#349 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#347 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#110 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#109 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#108 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#30 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#649 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#649 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#648 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#354 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#350 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#348 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#651 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#651 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#650 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#110 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#109 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#652 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#651 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#652 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#355 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#351 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#349 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#654 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#654 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#653 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#356 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#352 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#350 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#655 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#654 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#656 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#655 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#353 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#351 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#112 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#111 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#657 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#657 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#656 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#354 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#352 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#658 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#658 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#657 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#359 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#355 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#353 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#659 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#658 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#360 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#356 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#659 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#354 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#113 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#112 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#111 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#661 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#660 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#361 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#357 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#355 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#661 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#358 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#356 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#663 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#663 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#662 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#363 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#359 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#357 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#664 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#663 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#364 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#360 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#358 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#665 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#664 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#665 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#365 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#359 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#667 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#667 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#366 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#362 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#360 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#668 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#667 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#363 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#361 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#668 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#368 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#364 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#362 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#670 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#670 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#669 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#671 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#670 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#369 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#365 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#363 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#671 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#370 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#366 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#364 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#114 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#113 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#112 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#673 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#673 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#672 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#367 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#365 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#673 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#372 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#368 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#366 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#675 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#675 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#674 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#373 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#367 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#676 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#676 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#675 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#676 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#370 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#368 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#678 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#677 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#375 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#371 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#369 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#678 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#376 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#372 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#370 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#680 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#680 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#679 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#114 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#113 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#681 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#680 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#373 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#371 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#682 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#682 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#681 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#372 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#683 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#683 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#682 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#379 FD: 1 BD: 1 
+.+.: (wq_completion)nfc3_nci_rx_wq#375 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#373 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#684 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#683 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#380 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#376 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#685 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#684 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#377 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#375 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#686 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#685 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#382 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#687 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#378 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#376 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#686 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#688 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#687 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#383 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#379 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#377 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#688 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#384 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#380 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#378 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#690 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#690 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#689 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#385 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#381 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#379 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#691 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#690 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#692 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#691 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#386 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#382 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#380 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#693 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#692 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#387 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#694 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#694 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#693 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#383 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#381 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#694 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#388 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#384 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#382 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#696 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#116 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#115 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#695 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#114 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#389 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#383 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#697 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#697 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#696 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#698 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#697 
->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#390 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#386 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#384 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#699 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#699 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#698 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#117 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#116 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#699 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#391 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#387 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#385 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#701 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#701 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#700 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#117 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#702 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#116 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#702 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#701 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#702 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#392 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#388 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#386 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#704 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#704 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#703 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#119 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#118 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#117 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#389 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#387 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#705 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#704 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#394 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#390 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#388 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#391 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#706 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#389 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#706 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#705 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#706 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#396 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#708 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#708 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#707 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#393 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#391 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#709 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#709 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#708 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#710 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#709 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#398 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#394 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#711 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#711 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#712 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#711 FD: 26 BD: 2 
+.+.: (wq_completion)nfc3_nci_cmd_wq#399 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#393 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#713 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#713 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#712 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#396 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#714 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#714 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#713 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#715 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#714 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#401 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#397 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#395 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#715 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#402 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#717 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#398 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#717 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#396 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#716 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#120 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#119 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#118 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#718 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#719 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#718 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#403 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#399 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#121 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#120 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#119 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#397 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#720 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#719 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#404 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#400 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#398 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#721 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#721 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#720 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#405 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#721 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#401 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#399 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#121 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#120 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#723 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#723 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#724 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#724 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#723 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#725 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#724 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#402 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#400 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#726 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#726 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#725 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#727 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#726 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#403 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#401 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#728 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#728 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#727 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#408 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#404 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#402 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#729 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#728 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#409 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#403 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#730 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#729 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#410 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#406 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#404 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#731 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#730 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#732 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#411 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#732 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#731 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#407 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#732 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#412 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#408 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#406 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#734 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#734 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#733 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#413 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#409 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#407 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#414 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#410 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#408 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#123 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#122 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#121 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#736 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#736 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#735 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#736 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#415 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#738 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#411 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#409 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#738 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#124 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#123 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#122 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#739 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#739 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#738 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#739 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#416 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#412 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#410 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#125 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#124 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#123 FD: 1 BD: 2 
+.+.: (wq_completion)nfc2_nci_cmd_wq#741 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#741 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#740 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#126 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#741 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#125 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#124 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#31 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#417 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#413 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#411 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#742 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#418 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#127 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#414 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#126 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#412 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#125 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#32 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#744 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#415 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#413 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#743 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#128 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#127 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#126 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#745 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#745 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#744 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#34 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#33 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#33 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#745 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#416 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#414 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#747 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#746 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#421 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#417 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#415 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#748 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#748 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#747 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#748 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#750 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#750 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#749 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#422 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#418 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#416 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#750 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#423 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#419 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#417 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#752 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#752 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#751 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#420 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#418 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#753 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#752 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#425 
->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#421 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#419 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#754 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#754 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#753 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#426 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#422 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#420 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#754 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#427 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#128 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#127 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#756 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#423 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#421 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#756 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#755 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#756 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#428 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#424 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#422 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#758 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#758 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#757 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#425 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#423 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#130 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#129 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#128 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#430 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#426 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#424 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#131 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#760 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#130 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#129 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#760 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#759 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#431 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#427 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#425 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#760 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#432 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#428 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#426 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#762 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#132 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#131 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#761 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#130 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#763 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#762 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#433 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#429 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#427 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#764 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#763 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#430 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#428 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#765 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#765 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#764 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#435 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#766 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#766 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#765 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#431 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#429 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#766 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#432 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#430 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#768 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#437 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#433 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#767 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#35 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#34 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#34 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#133 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#132 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#131 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#769 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#769 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#768 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#770 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#770 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#438 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#434 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#432 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#771 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#771 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#770 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#435 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#433 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#772 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#440 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#771 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#436 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#134 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#133 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#132 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#773 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#773 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#772 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#774 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#774 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#773 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#435 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#135 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#134 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#133 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#775 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#774 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#442 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#776 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#438 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#436 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#776 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#775 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#777 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#776 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#778 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#777 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#443 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#439 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#437 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#136 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#135 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#134 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#778 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#444 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#440 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#438 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#780 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#780 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#779 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#441 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#781 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#780 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#439 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#781 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#446 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#442 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#440 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#783 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#782 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#784 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#783 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#785 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#784 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#786 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#785 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#443 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#441 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#787 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#786 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#788 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#787 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#789 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#789 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#788 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#444 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#442 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#790 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#449 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#789 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#445 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#443 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#791 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#446 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#790 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#444 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#792 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#791 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#793 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#792 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#451 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#445 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#137 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#136 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#135 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#794 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#794 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#793 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#794 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#452 FD: 1 
BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#448 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#446 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#796 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#796 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#795 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#453 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#449 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#447 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#454 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#797 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#796 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#450 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#448 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#138 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#137 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#136 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#455 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#451 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#449 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#799 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#799 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#798 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#139 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#138 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#137 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#800 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#800 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#799 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#800 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#452 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#450 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#802 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#802 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#801 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#457 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#453 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#451 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#140 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#139 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#138 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#802 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#458 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#454 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#452 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#141 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#140 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#139 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#804 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#804 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#803 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#805 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#805 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#804 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#807 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#142 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#807 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#806 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#141 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#140 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#455 
->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#808 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#808 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#807 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#143 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#809 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#142 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#456 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#454 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#141 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#808 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#35 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#35 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#810 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#809 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#457 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#811 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#811 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#455 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#810 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#144 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#143 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#142 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#812 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#811 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#813 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#813 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#812 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#458 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#456 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#457 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#814 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#814 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#813 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#460 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#815 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#815 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#814 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#145 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#144 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#143 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#816 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#816 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#815 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#816 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#465 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#461 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#145 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#144 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#818 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#818 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#462 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#460 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#819 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#819 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#818 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#467 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#147 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#463 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#146 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#145 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#461 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#820 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#819 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#464 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#462 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#821 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#820 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#469 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#465 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#463 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#822 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#822 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#148 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#147 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#146 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#823 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#822 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#470 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#466 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#464 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#823 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#471 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#467 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#465 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#825 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#825 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#149 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#824 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#148 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#147 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#826 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#825 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#468 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#466 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#827 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#827 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#826 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#473 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#469 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#467 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#828 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#828 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#829 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#470 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#150 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#149 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#148 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#468 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#830 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#830 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#829 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#831 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#830 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#471 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#469 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#151 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#150 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#149 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#832 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#831 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#476 ->&rq->__lock 
->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#472 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#470 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#833 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#832 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#473 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#471 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#834 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#834 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#833 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#478 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#472 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#834 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#479 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#475 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#473 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#480 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#836 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#835 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#837 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#481 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#477 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#475 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#836 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#838 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#837 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#839 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#839 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#152 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#151 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#150 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#482 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#478 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#840 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#839 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#479 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#153 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#152 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#151 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#841 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#841 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#840 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#478 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#842 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#841 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#154 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#152 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#485 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#481 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#479 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#842 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#482 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#480 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#155 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#154 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#153 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#844 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#843 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#487 ->&rq->__lock 
FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#483 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#481 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#845 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#844 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#484 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#482 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#846 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#846 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#845 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#847 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#846 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#485 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#483 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#155 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#154 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#848 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#848 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#847 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#486 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#484 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#849 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#848 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#491 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#487 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#485 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#850 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#849 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#492 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#488 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#851 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#850 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#493 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#489 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#487 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#852 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#852 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#494 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#490 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#488 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#853 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#852 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#495 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#489 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#854 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#496 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#853 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#492 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#490 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#855 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#855 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#854 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#493 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#491 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#856 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#855 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#157 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#156 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#155 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#498 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#494 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#492 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#856 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#858 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#858 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#499 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#857 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#493 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#859 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#858 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#500 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#496 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#494 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#860 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#859 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#497 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#495 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#861 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#502 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#498 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#861 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#860 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#496 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#158 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#157 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#156 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#861 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#863 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#862 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#497 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#864 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#864 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#863 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#498 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#865 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#865 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#864 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#866 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#865 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#867 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#867 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#866 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#868 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#867 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#159 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#157 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#869 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#869 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#868 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#505 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#501 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#499 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#870 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#869 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#871 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#870 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#506 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#502 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#500 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#872 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#872 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#507 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#501 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#873 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#873 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#872 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#874 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#873 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#508 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#504 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#502 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#875 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#874 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#509 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#505 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#503 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#160 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#159 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#876 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#510 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#875 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#506 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#504 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#161 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#160 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#159 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#877 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#877 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#876 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#878 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#878 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#511 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#507 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#505 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#879 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#879 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#512 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#506 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#880 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#880 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#879 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#509 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#507 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#881 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#881 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#880 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#514 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#510 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#508 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#882 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#515 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#511 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#509 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#162 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#161 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#160 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#883 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#883 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#882 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#883 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#512 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#510 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#885 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#884 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#517 ->&rq->__lock 
FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#513 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#511 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#886 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#886 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#885 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#162 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#161 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#887 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#887 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#886 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#887 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#518 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#514 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#512 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#889 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#888 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#519 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#515 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#513 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#164 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#163 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#162 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#890 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#889 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#891 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#891 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#890 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#892 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#891 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#516 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#514 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#893 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#893 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#521 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#892 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#517 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#515 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#893 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#895 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#894 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#522 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#518 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#516 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#164 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#523 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#519 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#163 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#517 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#896 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#896 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#895 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#896 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#520 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#518 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#166 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#525 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#521 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#519 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#898 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#165 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#898 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#897 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#899 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#898 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#522 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#520 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#527 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#899 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#523 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#521 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#167 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#166 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#165 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#901 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#901 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#900 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#522 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#902 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#902 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#901 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#525 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#523 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#903 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#903 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#902 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#904 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#903 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#905 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#905 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#904 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#526 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#906 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#905 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#531 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#527 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#525 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#907 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#906 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#908 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#907 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#909 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#909 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#532 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#908 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#528 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#526 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#167 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#166 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#910 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#910 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#909 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#911 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#910 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#533 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#912 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#912 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#911 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#527 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#169 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#168 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#167 
->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#913 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#913 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#912 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#914 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#913 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#915 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#914 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#534 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#530 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#169 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#168 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#916 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#535 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#916 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#531 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#529 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#915 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#916 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#536 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#532 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#530 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#918 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#918 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#917 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#918 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#533 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#531 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#920 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#919 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#534 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#921 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#532 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#920 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#922 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#922 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#921 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#923 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#923 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#922 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#924 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#923 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#535 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#533 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#171 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#170 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#169 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#924 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#536 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#534 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#926 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#926 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#925 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#926 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#537 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#535 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#928 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#928 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#542 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#538 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#536 FD: 1 BD: 1 
+.+.: (wq_completion)nfc2_nci_rx_wq#929 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#928 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#930 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#930 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#543 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#931 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#539 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#537 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#930 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#932 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#931 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#933 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#932 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#934 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#933 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#5 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#5 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#6 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#6 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#4 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#4 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#544 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#540 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#538 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#935 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#935 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#934 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#172 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#36 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#36 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc6_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#173 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#172 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#171 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#7 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#7 FD: 26 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#6 FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#4 FD: 26 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#6 FD: 26 BD: 1 +.+.: 
(wq_completion)nfc26_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#5 FD: 26 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#936 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#936 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#935 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#541 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#539 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#38 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#37 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#174 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#173 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#172 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc41_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#2 FD: 1 BD: 1 
+.+.: (wq_completion)nfc37_nci_tx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#2 FD: 26 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#38 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#38 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#175 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#174 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#173 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#546 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#540 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#937 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#937 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#936 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#938 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#176 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#175 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#174 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#939 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#543 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#541 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#939 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#938 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#177 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#176 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#940 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#940 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#939 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#177 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#176 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#548 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#544 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#941 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#941 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#940 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#942 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#941 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#545 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#543 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#943 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#943 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#942 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#546 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#544 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#178 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#177 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#944 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#943 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#551 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#547 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#545 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#945 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#945 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#944 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#552 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#548 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#546 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#549 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#554 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#550 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#946 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#548 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#946 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#945 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#180 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#178 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#39 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#947 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#947 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#946 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#551 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#549 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#948 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#948 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#947 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#552 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#949 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#948 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#557 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#180 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#179 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#553 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#551 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#950 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#950 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#949 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#950 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#558 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#554 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#552 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#952 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#952 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#951 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#555 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#553 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#953 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#953 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#952 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#560 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#181 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#180 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#556 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#554 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#954 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#955 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#955 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#954 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#561 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#557 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#555 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#182 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#181 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#956 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#955 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#957 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#956 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#562 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#558 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#556 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#563 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#559 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#958 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#957 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#557 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#184 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#183 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#182 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#558 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#959 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#959 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#185 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#184 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#183 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#41 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#40 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#960 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#40 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#960 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#959 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#960 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#565 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#186 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#561 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#559 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#962 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#962 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#961 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#963 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#962 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#566 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#562 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#560 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#964 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#187 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#964 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#186 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#42 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#963 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#185 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#563 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#561 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#965 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#964 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#564 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#562 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#966 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#965 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#565 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#563 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#967 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#967 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#966 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#968 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#968 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#967 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#187 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#570 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#566 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#564 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#968 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#571 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#567 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#565 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#970 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#970 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#969 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#572 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#568 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#566 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#970 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#189 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#188 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#187 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#972 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#971 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#972 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#974 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#974 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#973 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#42 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#42 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#569 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#567 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#190 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#189 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#188 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#975 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#975 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#974 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#43 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#43 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#976 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#975 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#574 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#570 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#568 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#977 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#976 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#575 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#571 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#978 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#977 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#572 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#979 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#979 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#978 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#570 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#191 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#190 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#189 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#980 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#979 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#573 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#571 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#980 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#578 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#574 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#572 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#982 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#981 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#579 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#575 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#573 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#982 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#580 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#576 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#574 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#192 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#191 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#190 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#984 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#984 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#983 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#581 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#575 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#985 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#984 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#193 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#192 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#191 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#582 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#578 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#576 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#986 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#985 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#45 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#44 ->&rq->__lock FD: 1 
BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#44 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#987 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#987 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#986 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#583 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#988 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#988 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#987 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#577 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#194 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#193 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#192 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#989 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#988 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#584 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#580 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#578 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#195 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#194 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#193 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#46 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#45 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#45 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#990 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#990 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#989 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#581 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#579 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#991 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#991 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#990 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#991 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#586 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#582 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#993 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#580 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#993 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#992 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#195 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#194 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#994 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#993 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#994 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#587 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#583 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#581 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#996 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#996 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#588 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#197 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#995 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#196 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#195 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#584 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#997 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#996 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#998 
FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#589 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#997 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#198 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#196 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#585 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#583 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#998 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#586 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#584 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#587 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#585 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#999 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1001 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#588 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#586 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1002 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1002 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1001 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#589 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#587 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1003 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1002 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1004 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1004 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1003 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#199 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#198 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#197 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1005 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1005 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1004 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1005 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#595 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#591 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1007 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1007 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1006 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#200 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#592 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#199 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#590 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#198 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#593 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#591 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1009 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1009 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1008 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#598 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#594 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#592 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1010 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#595 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1009 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#593 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#201 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#200 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#199 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc5_nci_cmd_wq#47 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#46 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1011 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1011 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1010 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1012 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1011 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#596 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#594 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1013 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1013 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1012 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1014 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1013 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#601 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#597 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#595 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1015 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1014 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1016 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1016 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1015 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1017 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1017 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1016 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1018 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#602 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#598 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#596 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#603 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#599 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#597 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1020 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1019 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#600 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#605 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#601 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#599 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#202 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#201 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#600 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#204 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#203 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#202 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#608 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#604 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#602 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1023 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1022 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1024 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1024 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1023 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1024 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#609 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#605 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#603 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1025 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#610 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#606 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#604 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#205 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1027 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#204 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#203 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1026 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1028 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1027 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1029 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#611 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#607 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#605 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1028 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1029 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#612 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#608 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#205 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#606 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#204 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#47 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#47 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1031 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1031 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1030 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1031 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#607 FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#49 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#48 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#48 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#205 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1033 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1033 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1032 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#614 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#610 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#608 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#208 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1034 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1033 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#615 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#611 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#208 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#609 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#207 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#49 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#49 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1035 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#616 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1035 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1034 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#612 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#610 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#208 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1036 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1035 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#617 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#613 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#611 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#211 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#210 FD: 26 BD: 1 +.+.: 
(wq_completion)nfc4_nci_tx_wq#209 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#51 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#50 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1037 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1037 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1036 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#618 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1038 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1037 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#612 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#212 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#211 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#210 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1039 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1039 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1040 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1039 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1041 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1040 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#615 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#613 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#212 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#211 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#620 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#614 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1042 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1042 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1041 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#52 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#51 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#51 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#621 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1043 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1042 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#617 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#615 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1043 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#622 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#618 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#616 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#214 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#213 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#212 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1045 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1044 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#623 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#619 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1046 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#617 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1046 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1045 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#14 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#9 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc12_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#9 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#9 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#214 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#213 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1046 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#624 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#620 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#618 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#53 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#52 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#52 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc21_nci_rx_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#6 FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#8 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1048 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#625 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#621 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1049 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1049 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1048 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#626 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#622 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#620 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1049 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#627 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1051 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#623 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1051 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#621 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1050 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#215 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#214 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1052 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1052 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1051 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1052 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#628 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#624 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#622 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1054 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1054 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1053 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#625 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#623 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1055 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1054 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#630 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#626 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#624 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1056 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1056 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#631 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1057 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1057 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#625 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1056 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#632 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#628 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1058 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1058 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1057 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1059 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1059 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1058 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1060 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1060 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1059 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#629 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#627 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#217 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#216 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#215 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1060 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#634 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#630 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#628 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#218 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#217 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#216 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1061 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#53 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#16 FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#55 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#54 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#54 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#11 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc20_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#9 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#11 FD: 26 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#8 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#8 FD: 26 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#7 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#7 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#9 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#8 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#7 FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#9 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#7 FD: 26 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#8 FD: 1 BD: 
1 +.+.: (wq_completion)nfc23_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#12 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#12 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#56 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#55 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#55 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1063 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1063 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1062 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#219 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#218 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#217 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#631 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#629 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#632 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#630 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1063 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#220 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#219 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#218 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#221 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#220 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#219 FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#19 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#222 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#221 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#220 FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1065 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1065 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1064 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#21 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#57 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#56 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#56 FD: 26 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#14 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#224 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#637 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#633 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#631 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#221 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1066 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1066 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1065 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#632 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1067 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1066 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1068 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1068 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#639 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#635 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#633 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1069 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1069 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1068 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1069 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#640 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#636 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#634 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#635 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#225 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#223 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#58 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#57 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#57 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#222 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1071 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1070 FD: 26 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#642 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#638 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1072 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#636 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#226 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#224 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#223 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1073 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1072 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#643 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#639 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#637 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1074 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1073 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1075 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1074 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#640 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#638 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#227 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1076 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#225 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#224 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1076 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1075 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1077 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1077 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1076 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#641 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#639 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1078 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1077 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1079 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1078 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#646 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#642 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#640 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#226 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#225 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1080 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1079 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#647 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#643 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#641 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1081 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#648 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1081 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1080 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#644 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#642 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1081 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#649 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#645 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#643 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1083 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1082 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#650 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#646 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#644 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1084 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1083 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#229 FD: 200 BD: 1 +.+.: (wq_completion)hci3#3 
->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#227 FD: 189 BD: 1 +.+.: (wq_completion)hci3#4 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1084 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1086 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1085 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1088 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#651 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#230 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1086 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#647 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#645 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#228 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#652 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#648 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#646 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1090 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1088 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#649 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#647 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1089 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#11 FD: 26 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1092 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#654 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#650 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#648 FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#231 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#229 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#228 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#59 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#58 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#58 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#20 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#15 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 5 +.+.: (wq_completion)tipc_rcv#2 FD: 1 BD: 5 +.+.: (wq_completion)tipc_send#2 FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1093 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1093 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1091 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#13 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#9 
->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#11 FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#12 FD: 26 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#13 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1094 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#13 FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1094 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1092 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#9 FD: 26 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#12 FD: 26 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1095 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1095 FD: 26 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#9 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1093 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#7 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#7 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#7 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#9 FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#8 
->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1094 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#655 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#651 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#649 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1096 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#656 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#652 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#650 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1099 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1097 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#653 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#651 FD: 717 BD: 13 +.+.: &devlink->lock_key#171 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->&n->list_lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->&rq->__lock ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1098 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1101 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1099 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1102 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1100 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#658 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#654 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#652 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1101 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#655 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#653 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1104 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1102 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#654 FD: 1 BD: 56 +.+.: (wq_completion)phy341 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1105 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1103 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1106 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#661 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#657 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#655 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1107 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1105 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#658 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#656 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1106 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1109 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1107 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#663 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#659 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1110 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1108 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1111 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1109 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#660 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#658 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1112 FD: 1 BD: 
1 +.+.: (wq_completion)nfc2_nci_tx_wq#1110 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#10 FD: 27 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#10 ->rcu_node_0 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#3 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1113 ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1113 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1111 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1114 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1114 FD: 26 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq#2 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1112 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq#2 FD: 26 BD: 2 +.+.: (wq_completion)nfc43_nci_cmd_wq ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_rx_wq FD: 26 BD: 1 +.+.: (wq_completion)nfc43_nci_tx_wq ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc41_nci_cmd_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc41_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#3 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#3 FD: 26 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#8 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#8 FD: 26 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#8 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#10 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#12 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc27_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#10 FD: 26 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#10 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#12 FD: 26 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#15 FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#17 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#60 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#59 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#59 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#232 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#230 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#229 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#665 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#661 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#659 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1115 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1115 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1113 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1116 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1114 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1117 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1115 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1116 ->&rq->__lock FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg0#319 ->(work_completion)(&peer->transmit_handshake_work) FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg1#311 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg0#320 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#162 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#156 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg1#312 ->&rq->__lock ->(work_completion)(&peer->transmit_handshake_work) FD: 68 BD: 1 +.+.: (wq_completion)wg-kex-wg2#308 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1119 FD: 104 BD: 1 +.+.: (wq_completion)wg-kex-wg2#309 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 105 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#153 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1117 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1120 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1118 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1119 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#662 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#660 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1122 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1122 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1120 ->&rq->__lock FD: 189 BD: 1 +.+.: (wq_completion)hci0#6 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1123 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1123 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1121 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#667 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1124 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1125 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1125 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#233 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#231 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1123 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#668 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#664 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#661 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1124 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#669 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#665 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#61 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#60 FD: 26 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#60 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#23 FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1127 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1125 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#4 FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#670 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#666 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#663 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#234 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#232 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#231 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#18 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#18 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#62 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#61 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#61 FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#26 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#24 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#24 FD: 26 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#13 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#13 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#15 FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#14 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc43_nci_cmd_wq#2 FD: 26 BD: 
2 +.+.: (wq_completion)nfc13_nci_cmd_wq#12 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_tx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#12 FD: 26 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#27 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#12 FD: 26 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#25 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#19 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#671 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#667 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#664 FD: 26 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#11 FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#4 FD: 26 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#4 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#668 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#665 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#11 FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#9 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#9 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#5 FD: 26 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc35_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#12 FD: 26 BD: 2 +.+.: (wq_completion)nfc41_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1128 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#13 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1126 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#14 FD: 26 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#235 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#233 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#232 FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#14 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#15 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#63 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#62 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#16 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#16 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#15 
FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#28 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#26 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#26 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#669 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#666 FD: 26 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#17 FD: 26 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#15 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#15 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#15 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#13 FD: 26 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#13 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#17 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#17 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#14 FD: 26 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#14 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#15 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#15 FD: 26 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#21 FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#21 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#21 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#674 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#670 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#667 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#29 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#27 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#16 FD: 26 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#17 FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#17 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#64 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#63 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#63 FD: 26 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#18 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#18 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#15 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc15_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#236 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#234 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#233 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1129 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1129 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1127 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1130 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1128 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#675 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#671 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#668 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1131 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1129 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#676 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#672 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#669 FD: 26 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#235 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#234 FD: 717 BD: 13 +.+.: &devlink->lock_key#172 ->&rq->__lock ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->pcpu_alloc_mutex ->&n->list_lock ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1132 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1132 FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1130 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#673 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#670 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1133 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1131 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#678 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#671 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#679 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#675 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#672 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1134 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1132 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#236 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#235 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1135 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1133 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#676 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#673 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1136 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1136 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1137 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1135 FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#677 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#674 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1138 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1136 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1139 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1137 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1140 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1140 ->&rq->__lock ->&cfs_rq->removed.lock FD: 26 
BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1138 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#682 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#675 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1139 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1142 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1140 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#679 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#676 FD: 1 BD: 56 +.+.: (wq_completion)phy338 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1143 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1141 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#684 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#680 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#677 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1144 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1142 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1145 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1143 ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1146 FD: 26 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1144 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#18 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#17 FD: 26 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#16 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#14 FD: 26 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#13 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#12 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#10 FD: 26 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#13 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#13 FD: 26 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#10 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#10 ->&rq->__lock FD: 29 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#14 ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#14 FD: 26 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#11 ->&rq->__lock FD: 26 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#11 ->&rq->__lock FD: 26 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#14 all lock chains: irq_context: 0 (console_sem).lock irq_context: 0 &obj_hash[i].lock irq_context: 0 &obj_hash[i].lock pool_lock irq_context: 0 cgroup_mutex irq_context: 0 fixmap_lock irq_context: 0 cpu_hotplug_lock irq_context: 0 cpu_hotplug_lock jump_label_mutex irq_context: 0 console_mutex irq_context: 0 input_pool.lock irq_context: 0 base_crng.lock irq_context: 0 cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 crng_init_wait.lock irq_context: 0 early_pfn_lock irq_context: 0 devtree_lock irq_context: 0 resource_lock irq_context: 0 restart_handler_list.lock irq_context: 0 
system_transition_mutex irq_context: 0 pcpu_lock irq_context: 0 debug_hook_lock irq_context: 0 zonelist_update_seq irq_context: 0 zonelist_update_seq zonelist_update_seq.seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 &zone->lock irq_context: 0 &zone->lock &____s->seqcount irq_context: 0 &pcp->lock &zone->lock irq_context: 0 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &____s->seqcount irq_context: 0 pool_lock#2 irq_context: 0 pcpu_alloc_mutex irq_context: 0 pcpu_alloc_mutex pcpu_lock irq_context: 0 &n->list_lock irq_context: 0 &c->lock irq_context: 0 slab_mutex irq_context: 0 slab_mutex pool_lock#2 irq_context: 0 slab_mutex pcpu_alloc_mutex irq_context: 0 slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 trace_types_lock irq_context: 0 panic_notifier_list.lock irq_context: 0 die_chain.lock irq_context: 0 trace_event_sem irq_context: 0 batched_entropy_u32.lock irq_context: 0 batched_entropy_u32.lock crngs.lock irq_context: 0 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sysctl_lock irq_context: 0 &rq->__lock irq_context: 0 &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 init_task.pi_lock irq_context: 0 init_task.pi_lock &rq->__lock irq_context: 0 init_task.vtime_seqcount irq_context: 0 slab_mutex &c->lock irq_context: 0 slab_mutex &pcp->lock &zone->lock irq_context: 0 slab_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 slab_mutex &____s->seqcount irq_context: 0 wq_pool_mutex irq_context: 0 wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 wq_pool_mutex &____s->seqcount irq_context: 0 wq_pool_mutex &c->lock irq_context: 0 wq_pool_mutex pool_lock#2 irq_context: 0 &wq->mutex irq_context: 0 &wq->mutex &pool->lock irq_context: 0 wq_pool_mutex &wq->mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 shrinker_mutex irq_context: 0 rcu_node_0 irq_context: 0 rcu_state.barrier_lock irq_context: 0 rcu_state.barrier_lock rcu_node_0 irq_context: 0 &rnp->exp_poll_lock irq_context: 0 &rnp->exp_poll_lock rcu_read_lock &pool->lock irq_context: 0 &rnp->exp_poll_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock &pcp->lock &zone->lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock &____s->seqcount irq_context: 0 trace_event_sem trace_event_ida.xa_lock &c->lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock pool_lock#2 irq_context: 0 trigger_cmd_mutex irq_context: 0 free_vmap_area_lock irq_context: 0 vmap_area_lock irq_context: 0 acpi_probe_mutex irq_context: 0 acpi_probe_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex free_vmap_area_lock irq_context: 0 acpi_probe_mutex vmap_area_lock irq_context: 0 acpi_probe_mutex &pcp->lock &zone->lock irq_context: 0 acpi_probe_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex &____s->seqcount 
irq_context: 0 acpi_probe_mutex init_mm.page_table_lock irq_context: 0 acpi_probe_mutex resource_lock irq_context: 0 acpi_probe_mutex &c->lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 acpi_probe_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 acpi_probe_mutex (console_sem).lock irq_context: 0 acpi_probe_mutex irq_domain_mutex irq_context: 0 acpi_probe_mutex pcpu_alloc_mutex irq_context: 0 acpi_probe_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_probe_mutex irq_domain_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 acpi_probe_mutex &domain->mutex irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &pcp->lock &zone->lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_probe_mutex &domain->mutex pool_lock#2 irq_context: 0 acpi_probe_mutex &domain->mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex &desc->request_mutex irq_context: 0 acpi_probe_mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex cpu_pm_notifier.lock irq_context: 0 acpi_probe_mutex &obj_hash[i].lock irq_context: 0 acpi_probe_mutex purge_vmap_area_lock irq_context: 0 acpi_probe_mutex iort_msi_chip_lock irq_context: 0 acpi_probe_mutex &zone->lock irq_context: 0 acpi_probe_mutex &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex its_lock irq_context: 0 acpi_probe_mutex resource_lock irq_context: 0 acpi_probe_mutex efi_mem_reserve_persistent_lock irq_context: 0 acpi_probe_mutex lpi_range_lock irq_context: 0 acpi_probe_mutex syscore_ops_lock irq_context: 0 acpi_probe_mutex its_lock &its->lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex resource_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex resource_lock irq_context: 0 timekeeper_lock irq_context: 0 timekeeper_lock tk_core.seq.seqcount irq_context: 0 timekeeper_lock tk_core.seq.seqcount &obj_hash[i].lock irq_context: 0 acpi_probe_mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex (console_sem).lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock tk_core.seq.seqcount irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock tick_broadcast_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock 
cpuhp_state_mutex clockevents_lock jiffies_seq.seqcount irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 acpi_probe_mutex clocksource_mutex irq_context: 0 clockevents_lock irq_context: 0 tk_core.seq.seqcount irq_context: 0 &base->lock irq_context: 0 &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &pool->lock irq_context: 0 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 batched_entropy_u64.lock irq_context: 0 batched_entropy_u64.lock crngs.lock irq_context: 0 pmus_lock irq_context: 0 pmus_lock pcpu_alloc_mutex irq_context: 0 pmus_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 pmus_lock pool_lock#2 irq_context: 0 pmus_lock &obj_hash[i].lock irq_context: 0 &swhash->hlist_mutex irq_context: 0 pmus_lock &cpuctx_mutex irq_context: 0 pmus_lock &obj_hash[i].lock pool_lock irq_context: 0 tty_ldiscs_lock irq_context: 0 console_lock irq_context: 0 console_lock pool_lock#2 irq_context: 0 console_lock &obj_hash[i].lock irq_context: 0 console_lock &pcp->lock &zone->lock irq_context: 0 console_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 console_lock &____s->seqcount irq_context: 0 console_lock &c->lock irq_context: 0 console_lock kbd_event_lock irq_context: 0 console_lock kbd_event_lock led_lock irq_context: 0 console_lock (console_sem).lock irq_context: 0 console_lock console_owner_lock irq_context: 0 init_task.alloc_lock irq_context: 0 acpi_ioremap_lock irq_context: 0 acpi_ioremap_lock pool_lock#2 irq_context: 0 semaphore->lock irq_context: 0 *(&acpi_gbl_reference_count_lock) irq_context: 0 hrtimer_bases.lock irq_context: 0 hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 percpu_counters_lock irq_context: 0 tomoyo_policy_lock irq_context: 0 tomoyo_policy_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem irq_context: 0 pernet_ops_rwsem stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem crngs.lock irq_context: 0 pernet_ops_rwsem net_rwsem irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 rtnl_mutex irq_context: 0 rtnl_mutex &c->lock irq_context: 0 rtnl_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &____s->seqcount irq_context: 0 rtnl_mutex pool_lock#2 irq_context: 0 lock irq_context: 0 lock kernfs_idr_lock irq_context: 0 lock kernfs_idr_lock pool_lock#2 irq_context: 0 &root->kernfs_rwsem irq_context: 0 file_systems_lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 dq_list_lock irq_context: 0 sb_lock irq_context: 0 &type->s_umount_key/1 irq_context: 0 &type->s_umount_key/1 pool_lock#2 irq_context: 0 &type->s_umount_key/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key/1 shrinker_mutex irq_context: 0 &type->s_umount_key/1 shrinker_mutex pool_lock#2 irq_context: 0 &type->s_umount_key/1 list_lrus_mutex irq_context: 0 &type->s_umount_key/1 sb_lock irq_context: 0 &type->s_umount_key/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key/1 &c->lock irq_context: 0 &type->s_umount_key/1 &____s->seqcount irq_context: 0 &type->s_umount_key/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key/1 percpu_counters_lock irq_context: 0 &type->s_umount_key/1 crngs.lock irq_context: 0 &type->s_umount_key/1 &sbinfo->stat_lock irq_context: 0 &type->s_umount_key/1 
&sb->s_type->i_lock_key irq_context: 0 &type->s_umount_key/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key/1 batched_entropy_u32.lock irq_context: 0 &type->s_umount_key/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key/1 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 &type->s_umount_key/1 &dentry->d_lock irq_context: 0 mnt_id_ida.xa_lock irq_context: 0 &dentry->d_lock irq_context: 0 mount_lock irq_context: 0 mount_lock mount_lock.seqcount irq_context: 0 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#2/1 irq_context: 0 &type->s_umount_key#2/1 pool_lock#2 irq_context: 0 &type->s_umount_key#2/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#2/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#2/1 shrinker_mutex irq_context: 0 &type->s_umount_key#2/1 &c->lock irq_context: 0 &type->s_umount_key#2/1 &____s->seqcount irq_context: 0 &type->s_umount_key#2/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#2/1 sb_lock irq_context: 0 &type->s_umount_key#2/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#2/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#2/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#2/1 &sb->s_type->i_lock_key#2 irq_context: 0 &type->s_umount_key#2/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#2/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#2/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 &type->s_umount_key#2/1 &dentry->d_lock irq_context: 0 ucounts_lock irq_context: 0 proc_inum_ida.xa_lock irq_context: 0 init_fs.lock irq_context: 0 init_fs.lock init_fs.seq.seqcount irq_context: hardirq jiffies_lock irq_context: hardirq jiffies_lock jiffies_seq.seqcount irq_context: hardirq log_wait.lock irq_context: 0 &type->s_umount_key#3/1 irq_context: 0 &type->s_umount_key#3/1 pool_lock#2 irq_context: 0 &type->s_umount_key#3/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#3/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#3/1 shrinker_mutex irq_context: 0 &type->s_umount_key#3/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#3/1 sb_lock irq_context: 0 &type->s_umount_key#3/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#3/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#3/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#3/1 &____s->seqcount irq_context: 0 &type->s_umount_key#3/1 &c->lock irq_context: 0 &type->s_umount_key#3/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#3/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#3/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#3/1 &sb->s_type->i_lock_key#3 &dentry->d_lock irq_context: 0 &type->s_umount_key#3/1 &dentry->d_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 proc_subdir_lock irq_context: 0 proc_subdir_lock irq_context: 0 pernet_ops_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 &type->s_umount_key#4/1 irq_context: 0 &type->s_umount_key#4/1 pool_lock#2 irq_context: 0 &type->s_umount_key#4/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#4/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#4/1 shrinker_mutex irq_context: 0 
&type->s_umount_key#4/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#4/1 sb_lock irq_context: 0 &type->s_umount_key#4/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#4/1 &sb->s_type->i_lock_key#4 irq_context: 0 &type->s_umount_key#4/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#4/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#4/1 &sb->s_type->i_lock_key#4 &dentry->d_lock irq_context: 0 &type->s_umount_key#4/1 &dentry->d_lock irq_context: 0 cgroup_mutex pcpu_alloc_mutex irq_context: 0 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cgroup_mutex &c->lock irq_context: 0 cgroup_mutex &____s->seqcount irq_context: 0 cgroup_mutex pool_lock#2 irq_context: 0 cgroup_mutex lock irq_context: 0 cgroup_mutex lock kernfs_idr_lock irq_context: 0 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cgroup_mutex &root->kernfs_rwsem irq_context: 0 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cgroup_mutex &obj_hash[i].lock irq_context: 0 cgroup_mutex cgroup_file_kn_lock irq_context: 0 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 cgroup_mutex css_set_lock irq_context: 0 lock cgroup_idr_lock irq_context: 0 lock cgroup_idr_lock pool_lock#2 irq_context: 0 cpuset_mutex irq_context: 0 cpuset_mutex callback_lock irq_context: 0 cgroup_mutex blkcg_pol_mutex irq_context: 0 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex irq_context: 0 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cgroup_mutex lock cgroup_idr_lock irq_context: 0 cgroup_mutex lock cgroup_idr_lock pool_lock#2 irq_context: 0 cgroup_mutex percpu_counters_lock irq_context: 0 cgroup_mutex shrinker_mutex irq_context: 0 cgroup_mutex shrinker_mutex pool_lock#2 irq_context: 0 cgroup_mutex &base->lock irq_context: 0 cgroup_mutex &base->lock &obj_hash[i].lock irq_context: 0 cgroup_mutex devcgroup_mutex irq_context: 0 cgroup_mutex cpu_hotplug_lock irq_context: 0 cgroup_mutex cpu_hotplug_lock freezer_mutex irq_context: 0 rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 init_sighand.siglock irq_context: 0 init_mm.page_table_lock irq_context: 0 init_files.file_lock irq_context: 0 rcu_read_lock init_sighand.siglock irq_context: 0 lock pidmap_lock irq_context: 0 lock pidmap_lock pool_lock#2 irq_context: 0 pidmap_lock irq_context: 0 cgroup_threadgroup_rwsem irq_context: 0 cgroup_threadgroup_rwsem css_set_lock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock init_sighand.siglock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 &p->pi_lock irq_context: 0 &p->pi_lock &rq->__lock irq_context: 0 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &p->pi_lock &rq->__lock &base->lock irq_context: 0 &p->pi_lock 
&rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &p->pi_lock irq_context: 0 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (kthreadd_done).wait.lock irq_context: 0 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock irq_context: 0 &p->alloc_lock irq_context: 0 &p->alloc_lock &____s->seqcount#2 irq_context: 0 fs_reclaim irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kthread_create_lock irq_context: 0 &x->wait irq_context: 0 rcu_read_lock &sighand->siglock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock &sighand->siglock irq_context: 0 &x->wait &p->pi_lock irq_context: 0 &x->wait &p->pi_lock &rq->__lock irq_context: 0 &x->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (worker)->lock irq_context: 0 wq_pool_mutex fs_reclaim irq_context: 0 wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 wq_pool_mutex kthread_create_lock irq_context: 0 wq_pool_mutex &p->pi_lock irq_context: 0 wq_pool_mutex &p->pi_lock &rq->__lock irq_context: 0 wq_pool_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex &x->wait irq_context: 0 wq_pool_mutex &rq->__lock irq_context: 0 wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex &obj_hash[i].lock irq_context: 0 wq_pool_attach_mutex irq_context: 0 wq_mayday_lock irq_context: 0 &xa->xa_lock irq_context: 0 &pool->lock irq_context: 0 &pool->lock &p->pi_lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (&pool->mayday_timer) irq_context: 0 &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rnp->exp_poll_wq) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rnp->exp_poll_wq) &rnp->exp_poll_lock irq_context: 0 (null) irq_context: 0 (null) tk_core.seq.seqcount irq_context: 0 (&wq_watchdog_timer) irq_context: 0 (wq_completion)events_unbound irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) allocation_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq allocation_wait.lock irq_context: hardirq allocation_wait.lock &p->pi_lock irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 batched_entropy_u8.lock irq_context: 0 batched_entropy_u8.lock crngs.lock irq_context: 0 kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&(&kfence_timer)->work) &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex irq_context: 0 rcu_tasks.cbs_gbl_lock irq_context: hardirq rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#2 irq_context: 0 rcu_tasks__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 rcu_tasks.tasks_gp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex &base->lock irq_context: 0 rcu_tasks.tasks_gp_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock &obj_hash[i].lock pool_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &ACCESS_PRIVATE(sdp, lock) irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#3 irq_context: 0 rcu_tasks.tasks_gp_mutex &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks_trace.tasks_gp_mutex irq_context: 0 rcu_tasks_trace.cbs_gbl_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) 
tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex kernel/rcu/tasks.h:152 irq_context: softirq (&timer.timer) irq_context: softirq (&timer.timer) &p->pi_lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&kfence_timer)->timer irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex (&timer.timer) irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_tasks__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#2 irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#2 &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#2 &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &base->lock irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &ACCESS_PRIVATE(rtpcp, lock) irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#2 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#2 &p->pi_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#2 &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &rq->__lock irq_context: 0 
rcu_read_lock rcu_tasks_trace__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_tasks_trace__percpu.cbs_pcpu_lock &base->lock irq_context: 0 rcu_read_lock rcu_tasks_trace__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 (memory_chain).rwsem irq_context: 0 cpu_hotplug_lock smpboot_threads_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock fs_reclaim irq_context: 0 cpu_hotplug_lock smpboot_threads_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock smpboot_threads_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock smpboot_threads_lock kthread_create_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &x->wait irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &rq->__lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock irq_context: 0 &rcu_state.gp_wq irq_context: 0 &stop_pi_lock irq_context: 0 &stop_pi_lock &rq->__lock irq_context: 0 &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &stopper->lock irq_context: 0 (module_notify_list).rwsem irq_context: 0 ddebug_lock irq_context: 0 iort_msi_chip_lock irq_context: 0 irq_domain_mutex irq_context: 0 irq_domain_mutex fs_reclaim irq_context: 0 irq_domain_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 irq_domain_mutex pool_lock#2 irq_context: 0 cci_probing irq_context: 0 cci_probing devtree_lock irq_context: 0 resource_lock irq_context: 0 fixmap_lock fs_reclaim irq_context: 0 fixmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 fixmap_lock &____s->seqcount irq_context: 0 fixmap_lock &c->lock irq_context: 0 fixmap_lock pool_lock#2 irq_context: 0 rcu_read_lock ptlock_ptr(ptdesc) irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex clockevents_lock irq_context: 0 watchdog_mutex irq_context: 0 watchdog_mutex cpu_hotplug_lock irq_context: 0 watchdog_mutex cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 watchdog_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)events irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#4 irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) hrtimer_bases.lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#5 irq_context: 0 watchdog_mutex cpu_hotplug_lock &x->wait#5 irq_context: 0 &newf->file_lock 
irq_context: 0 init_fs.lock &dentry->d_lock irq_context: 0 &p->vtime.seqcount irq_context: 0 cpu_hotplug_lock mem_hotplug_lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.waiters.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpuset_hotplug_work irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock fs_reclaim irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock pool_lock#2 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock kthread_create_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &x->wait irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock pool_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock 
&rq->__lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &obj_hash[i].lock irq_context: softirq rcu_callback pool_lock#2 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &swhash->hlist_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pmus_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pmus_lock &cpuctx_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcp_batch_high_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &xa->xa_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock fs_reclaim irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pool_lock#2 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &c->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &____s->seqcount irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock kthread_create_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &x->wait irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock wq_pool_attach_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcpu_alloc_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_node_0 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock (cpu_running).wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &base->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &base->lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock cpu_hotplug_lock.waiters.lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback mem_hotplug_lock.rss.gp_wait.lock irq_context: softirq rcu_callback mem_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: softirq rcu_callback cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 its_lock irq_context: 0 its_lock &its->lock irq_context: 0 clockevents_lock tick_broadcast_lock irq_context: 0 clockevents_lock jiffies_seq.seqcount irq_context: 0 clockevents_lock tk_core.seq.seqcount irq_context: 0 &irq_desc_lock_class irq_context: 
0 &irq_desc_lock_class irq_controller_lock irq_context: 0 (cpu_running).wait.lock irq_context: 0 (cpu_running).wait.lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock (&timer.timer) irq_context: 0 &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up sparse_irq_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up sparse_irq_lock &irq_desc_lock_class irq_context: 0 cpu_hotplug_lock cpuhp_state-up &swhash->hlist_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up pmus_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up pmus_lock &cpuctx_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up &x->wait#4 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &obj_hash[i].lock pool_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &x->wait#7 irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &pool->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_node_0 irq_context: 0 cpu_hotplug_lock cpuhp_state-up resource_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock &zone->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up &c->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state-up resource_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock 
&pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock &rt_b->rt_runtime_lock &rt_rq->rt_runtime_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 &x->wait#6 &p->pi_lock irq_context: hardirq &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: 0 &rcu_state.expedited_wq irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &base->lock irq_context: 0 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stopper->lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &rq->__lock irq_context: 0 &x->wait#8 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex (&timer.timer) irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &base->lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &x->wait#8 irq_context: 0 &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work cpu_hotplug_lock irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 sched_domains_mutex irq_context: 0 sched_domains_mutex fs_reclaim irq_context: 0 sched_domains_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sched_domains_mutex pool_lock#2 irq_context: 0 sched_domains_mutex &obj_hash[i].lock irq_context: 0 sched_domains_mutex pcpu_alloc_mutex irq_context: 0 sched_domains_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sched_domains_mutex &c->lock irq_context: 0 sched_domains_mutex &____s->seqcount irq_context: 0 sched_domains_mutex 
rcu_read_lock &obj_hash[i].lock irq_context: 0 sched_domains_mutex rcu_read_lock pool_lock#2 irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &cp->lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &rt_b->rt_runtime_lock &rt_rq->rt_runtime_lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 sched_domains_mutex pcpu_lock irq_context: 0 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 slab_mutex fs_reclaim irq_context: 0 slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 irq_context: 0 &type->s_umount_key#5/1 fs_reclaim irq_context: 0 &type->s_umount_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 pool_lock#2 irq_context: 0 &type->s_umount_key#5/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#5/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#5/1 shrinker_mutex irq_context: 0 &type->s_umount_key#5/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#5/1 sb_lock irq_context: 0 &type->s_umount_key#5/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#5/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#5/1 percpu_counters_lock irq_context: 0 &type->s_umount_key#5/1 crngs.lock irq_context: 0 &type->s_umount_key#5/1 &sbinfo->stat_lock irq_context: 0 &type->s_umount_key#5/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 &sb->s_type->i_lock_key#5 irq_context: 0 &type->s_umount_key#5/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#5/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#5/1 batched_entropy_u32.lock irq_context: 0 &type->s_umount_key#5/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 &type->s_umount_key#5/1 &dentry->d_lock irq_context: 0 (setup_done).wait.lock irq_context: 0 namespace_sem irq_context: 0 namespace_sem fs_reclaim irq_context: 0 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 namespace_sem &pcp->lock &zone->lock irq_context: 0 namespace_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 namespace_sem &____s->seqcount irq_context: 0 namespace_sem &c->lock irq_context: 0 namespace_sem pool_lock#2 irq_context: 0 namespace_sem mnt_id_ida.xa_lock irq_context: 0 namespace_sem pcpu_alloc_mutex irq_context: 0 namespace_sem pcpu_alloc_mutex pcpu_lock irq_context: 0 namespace_sem &dentry->d_lock irq_context: 0 namespace_sem mount_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &p->alloc_lock init_fs.lock irq_context: 0 rcu_read_lock &____s->seqcount#3 irq_context: 0 file_systems_lock irq_context: 0 &type->s_umount_key#6 irq_context: 0 &type->s_umount_key#6 fs_reclaim irq_context: 0 &type->s_umount_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#6 pool_lock#2 irq_context: 0 &type->s_umount_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#6 &____s->seqcount irq_context: 0 &type->s_umount_key#6 &c->lock irq_context: 0 &type->s_umount_key#6 &lru->node[i].lock irq_context: 0 &type->s_umount_key#6 
&sbinfo->stat_lock irq_context: 0 &type->s_umount_key#6 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key irq_context: 0 &sb->s_type->i_mutex_key namespace_sem irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key namespace_sem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 rcu_read_lock &sb->s_type->i_lock_key#2 irq_context: 0 rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_lock_key#5 irq_context: 0 &fs->lock irq_context: 0 &fs->lock &____s->seqcount#3 irq_context: 0 (setup_done).wait.lock &p->pi_lock irq_context: 0 req_lock irq_context: 0 of_mutex irq_context: 0 of_mutex fs_reclaim irq_context: 0 of_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 of_mutex pool_lock#2 irq_context: 0 of_mutex lock irq_context: 0 of_mutex lock kernfs_idr_lock irq_context: 0 of_mutex &root->kernfs_rwsem irq_context: 0 of_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &x->wait#9 irq_context: 0 &k->list_lock irq_context: 0 bus_type_sem irq_context: 0 &root->kernfs_rwsem irq_context: 0 &dev->power.lock irq_context: 0 dpm_list_mtx irq_context: 0 uevent_sock_mutex irq_context: 0 running_helpers_waitq.lock irq_context: 0 sysfs_symlink_target_lock irq_context: 0 &k->k_lock irq_context: 0 &dev->mutex &k->list_lock irq_context: 0 &dev->mutex &k->k_lock irq_context: 0 &dev->mutex &dev->power.lock irq_context: 0 subsys mutex irq_context: 0 memory_blocks.xa_lock irq_context: 0 memory_blocks.xa_lock pool_lock#2 irq_context: 0 lock kernfs_idr_lock &c->lock irq_context: 0 lock kernfs_idr_lock &____s->seqcount irq_context: softirq rcu_read_lock &rq->__lock irq_context: softirq rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex (&timer.timer) irq_context: 0 rcu_tasks_trace.tasks_gp_mutex (console_sem).lock irq_context: 0 subsys mutex#2 irq_context: 0 subsys mutex#3 irq_context: 0 dev_pm_qos_mtx irq_context: 0 dev_pm_qos_mtx fs_reclaim irq_context: 0 dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 dev_pm_qos_mtx pool_lock#2 irq_context: 0 dev_pm_qos_mtx &dev->power.lock irq_context: 0 dev_pm_qos_mtx pm_qos_lock irq_context: 0 
dev_pm_qos_sysfs_mtx irq_context: 0 dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 dev_pm_qos_sysfs_mtx fs_reclaim irq_context: 0 dev_pm_qos_sysfs_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 dev_pm_qos_sysfs_mtx pool_lock#2 irq_context: 0 dev_pm_qos_sysfs_mtx lock irq_context: 0 dev_pm_qos_sysfs_mtx lock kernfs_idr_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 dev_pm_qos_mtx &c->lock irq_context: 0 dev_pm_qos_mtx &pcp->lock &zone->lock irq_context: 0 dev_pm_qos_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 dev_pm_qos_mtx &____s->seqcount irq_context: 0 register_lock irq_context: 0 register_lock proc_subdir_lock irq_context: 0 register_lock fs_reclaim irq_context: 0 register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_lock pool_lock#2 irq_context: 0 register_lock proc_inum_ida.xa_lock irq_context: 0 register_lock proc_subdir_lock irq_context: 0 register_lock &c->lock irq_context: 0 register_lock &____s->seqcount irq_context: 0 register_lock proc_inum_ida.xa_lock pool_lock#2 irq_context: 0 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 batched_entropy_u64.lock crngs.lock base_crng.lock irq_context: 0 &x->wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_pm_notifier.lock irq_context: 0 (cpufreq_policy_notifier_list).rwsem irq_context: 0 (pm_chain_head).rwsem irq_context: 0 cpufreq_governor_mutex irq_context: 0 rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 rcu_state.exp_mutex &rq->__lock irq_context: 0 rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 clocksource_mutex irq_context: 0 syscore_ops_lock irq_context: softirq (&timer.timer) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 binfmt_lock irq_context: 0 pin_fs_lock irq_context: 0 
&type->s_umount_key#7/1 irq_context: 0 &type->s_umount_key#7/1 fs_reclaim irq_context: 0 &type->s_umount_key#7/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#7/1 pool_lock#2 irq_context: 0 &type->s_umount_key#7/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#7/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#7/1 shrinker_mutex irq_context: 0 &type->s_umount_key#7/1 &____s->seqcount irq_context: 0 &type->s_umount_key#7/1 &c->lock irq_context: 0 &type->s_umount_key#7/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#7/1 sb_lock irq_context: 0 &type->s_umount_key#7/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#7/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#7/1 &sb->s_type->i_lock_key#6 irq_context: 0 &type->s_umount_key#7/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#7/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#7/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#7/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#7/1 &sb->s_type->i_lock_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#7/1 &dentry->d_lock irq_context: 0 rcu_read_lock mount_lock irq_context: 0 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 irq_context: 0 &sb->s_type->i_mutex_key#2 &sb->s_type->i_lock_key#6 irq_context: 0 &sb->s_type->i_mutex_key#2 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &sb->s_type->i_lock_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#8/1 irq_context: 0 &type->s_umount_key#8/1 fs_reclaim irq_context: 0 &type->s_umount_key#8/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#8/1 pool_lock#2 irq_context: 0 &type->s_umount_key#8/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#8/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#8/1 shrinker_mutex irq_context: 0 &type->s_umount_key#8/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#8/1 sb_lock irq_context: 0 &type->s_umount_key#8/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#8/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#8/1 &sb->s_type->i_lock_key#7 irq_context: 0 &type->s_umount_key#8/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#8/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#8/1 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &type->s_umount_key#8/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 irq_context: 0 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#3 
&dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 chrdevs_lock irq_context: 0 cb_lock irq_context: 0 cb_lock genl_mutex irq_context: 0 cb_lock genl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex pool_lock#2 irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) (&timer.timer) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 subsys mutex#4 irq_context: hardirq allocation_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 async_lock irq_context: 0 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_read_lock &rq->__lock 
irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 regulator_list_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex device_links_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fwnode_link_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex device_links_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->devres_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_maps_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#9 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_nesting_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_ww_class_mutex regulator_nesting_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex devtree_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex 
pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex bus_type_sem irq_context: 0 &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (reboot_notifier_list).rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem irq_context: 0 purge_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dpm_list_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#5 irq_context: 0 purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#5 &k->k_lock irq_context: 0 purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pin_fs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 purge_vmap_area_lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_list_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &x->wait#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex deferred_probe_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex deferred_probe_mutex &rq->__lock irq_context: 0 &x->wait#6 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#6 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex probe_waitqueue.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock irq_context: 0 &type->s_umount_key#9/1 irq_context: 0 &type->s_umount_key#9/1 fs_reclaim irq_context: 0 &type->s_umount_key#9/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#9/1 pool_lock#2 irq_context: 0 &type->s_umount_key#9/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#9/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#9/1 shrinker_mutex irq_context: 0 &type->s_umount_key#9/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#9/1 sb_lock irq_context: 0 &type->s_umount_key#9/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#9/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#9/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#9/1 &____s->seqcount irq_context: 0 &type->s_umount_key#9/1 &c->lock irq_context: 0 &type->s_umount_key#9/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#9/1 &sb->s_type->i_lock_key#8 irq_context: 0 &type->s_umount_key#9/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#9/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#9/1 &sb->s_type->i_lock_key#8 &dentry->d_lock irq_context: 0 &type->s_umount_key#9/1 &dentry->d_lock irq_context: 0 pernet_ops_rwsem fs_reclaim irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &____s->seqcount irq_context: 0 pernet_ops_rwsem &c->lock irq_context: 0 pernet_ops_rwsem sysctl_lock irq_context: 0 vmap_purge_lock irq_context: 0 vmap_purge_lock purge_vmap_area_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock 
irq_context: 0 vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 &fp->aux->used_maps_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 proto_list_mutex irq_context: 0 targets_mutex irq_context: 0 nl_table_lock irq_context: 0 nl_table_wait.lock irq_context: 0 net_family_lock irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock irq_context: 0 pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem &dir->lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex irq_context: 0 rtnl_mutex fs_reclaim irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sparse_irq_lock irq_context: 0 sparse_irq_lock fs_reclaim irq_context: 0 sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sparse_irq_lock pool_lock#2 irq_context: 0 sparse_irq_lock &c->lock irq_context: 0 sparse_irq_lock &____s->seqcount irq_context: 0 sparse_irq_lock lock irq_context: 0 sparse_irq_lock lock kernfs_idr_lock irq_context: 0 sparse_irq_lock &root->kernfs_rwsem irq_context: 0 sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &cma->lock irq_context: 0 cma_mutex irq_context: 0 cma_mutex &zone->lock irq_context: 0 cma_mutex &zone->lock &____s->seqcount irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock &zone->lock irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cma_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cma_mutex &obj_hash[i].lock irq_context: 0 cma_mutex lock#2 irq_context: 0 &pool->lock#2 irq_context: 0 cma_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 cma_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 freezer_lock irq_context: 0 audit_backlog_wait.lock irq_context: 0 &list->lock irq_context: 0 kauditd_wait.lock irq_context: 0 kauditd_wait.lock &p->pi_lock irq_context: 0 lock#3 irq_context: 0 lock#3 &zone->lock irq_context: 0 pcp_batch_high_lock irq_context: 0 khugepaged_mutex irq_context: 0 &(&priv->bus_notifier)->rwsem irq_context: 0 gdp_mutex irq_context: 0 gdp_mutex &k->list_lock irq_context: 0 gdp_mutex fs_reclaim irq_context: 0 gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 gdp_mutex pool_lock#2 irq_context: 0 gdp_mutex lock irq_context: 0 gdp_mutex lock kernfs_idr_lock irq_context: 0 gdp_mutex &root->kernfs_rwsem irq_context: 0 gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 subsys mutex#6 irq_context: 0 subsys mutex#6 &k->k_lock irq_context: 0 subsys mutex#7 irq_context: 0 subsys mutex#7 &k->list_lock irq_context: 0 subsys mutex#7 &k->k_lock irq_context: 0 regmap_debugfs_early_lock irq_context: 0 (acpi_reconfig_chain).rwsem irq_context: 0 __i2c_board_lock irq_context: 0 quarantine_lock irq_context: 0 core_lock irq_context: 0 core_lock &k->list_lock irq_context: 0 core_lock &k->k_lock irq_context: 0 cb_lock genl_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex nl_table_wait.lock irq_context: 0 remove_cache_srcu irq_context: 0 remove_cache_srcu quarantine_lock irq_context: 0 nl_table_lock irq_context: 0 thermal_governor_lock irq_context: 0 thermal_governor_lock thermal_list_lock irq_context: 0 cpuidle_lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpuidle_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpuidle_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 cpuidle_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpuidle_lock &obj_hash[i].lock irq_context: 0 cpuidle_lock (console_sem).lock irq_context: 0 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_lock_key#8 irq_context: 0 &dir->lock irq_context: 0 k-sk_lock-AF_QIPCRTR irq_context: 0 k-sk_lock-AF_QIPCRTR k-slock-AF_QIPCRTR irq_context: 0 k-slock-AF_QIPCRTR irq_context: 0 k-sk_lock-AF_QIPCRTR fs_reclaim irq_context: 0 k-sk_lock-AF_QIPCRTR fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 k-sk_lock-AF_QIPCRTR qrtr_ports.xa_lock 
irq_context: 0 k-sk_lock-AF_QIPCRTR pool_lock#2 irq_context: 0 k-sk_lock-AF_QIPCRTR qrtr_node_lock irq_context: 0 k-sk_lock-AF_QIPCRTR &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex irq_context: 0 resource_lock pool_lock#2 irq_context: 0 resource_lock &obj_hash[i].lock irq_context: 0 resource_lock &c->lock irq_context: 0 resource_lock &____s->seqcount irq_context: 0 crngs.lock irq_context: 0 (crypto_chain).rwsem irq_context: 0 tty_mutex irq_context: 0 iova_cache_mutex irq_context: 0 iova_cache_mutex cpu_hotplug_lock irq_context: 0 iova_cache_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 iova_cache_mutex slab_mutex irq_context: 0 iova_cache_mutex slab_mutex fs_reclaim irq_context: 0 iova_cache_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 iova_cache_mutex slab_mutex pool_lock#2 irq_context: 0 iova_cache_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 iova_cache_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 gdp_mutex &c->lock irq_context: 0 gdp_mutex &____s->seqcount irq_context: 0 subsys mutex#8 irq_context: 0 subsys mutex#8 &k->k_lock irq_context: 0 device_links_lock irq_context: 0 uidhash_lock irq_context: hardirq &retval->lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) irq_context: 0 oom_reaper_wait.lock irq_context: 0 subsys mutex#9 irq_context: 0 &pgdat->kcompactd_wait irq_context: 0 hugetlb_lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &obj_hash[i].lock irq_context: 0 memory_tier_lock irq_context: 0 memory_tier_lock fs_reclaim irq_context: 0 memory_tier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 memory_tier_lock pool_lock#2 irq_context: 0 memory_tier_lock &x->wait#9 irq_context: 0 memory_tier_lock &obj_hash[i].lock irq_context: 0 memory_tier_lock &k->list_lock irq_context: 0 memory_tier_lock &c->lock irq_context: 0 memory_tier_lock &____s->seqcount irq_context: 0 memory_tier_lock lock irq_context: 0 memory_tier_lock lock kernfs_idr_lock irq_context: 0 memory_tier_lock &root->kernfs_rwsem irq_context: 0 memory_tier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 memory_tier_lock bus_type_sem irq_context: 0 memory_tier_lock sysfs_symlink_target_lock irq_context: 0 memory_tier_lock &k->k_lock irq_context: 0 memory_tier_lock &root->kernfs_rwsem irq_context: 0 memory_tier_lock &dev->power.lock irq_context: 0 memory_tier_lock dpm_list_mtx irq_context: 0 memory_tier_lock uevent_sock_mutex irq_context: 0 memory_tier_lock running_helpers_waitq.lock irq_context: 0 memory_tier_lock &dev->mutex &k->list_lock irq_context: 0 memory_tier_lock &dev->mutex &k->k_lock irq_context: 0 memory_tier_lock &dev->mutex &dev->power.lock irq_context: 0 memory_tier_lock subsys mutex#10 irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 memory_tier_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 memory_tier_lock rcu_state.exp_mutex 
&rq->__lock irq_context: 0 khugepaged_mutex fs_reclaim irq_context: 0 khugepaged_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 khugepaged_mutex pool_lock#2 irq_context: 0 khugepaged_mutex kthread_create_lock irq_context: 0 khugepaged_mutex &p->pi_lock irq_context: 0 khugepaged_mutex &p->pi_lock &rq->__lock irq_context: 0 khugepaged_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 khugepaged_mutex &x->wait irq_context: 0 khugepaged_mutex &rq->__lock irq_context: 0 khugepaged_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ksm_thread_mutex irq_context: 0 ksm_thread_wait.lock irq_context: 0 khugepaged_mutex &obj_hash[i].lock irq_context: 0 khugepaged_mutex lock#3 irq_context: 0 khugepaged_mutex lock#3 &zone->lock irq_context: 0 khugepaged_mutex pcp_batch_high_lock irq_context: 0 cgroup_mutex fs_reclaim irq_context: 0 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_ops_lock irq_context: 0 crypto_alg_sem irq_context: 0 crypto_alg_sem (crypto_chain).rwsem irq_context: 0 cpu_hotplug_lock fs_reclaim irq_context: 0 cpu_hotplug_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock pcpu_alloc_mutex irq_context: 0 cpu_hotplug_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cpu_hotplug_lock &wq->mutex irq_context: 0 cpu_hotplug_lock &wq->mutex &pool->lock irq_context: 0 cpu_hotplug_lock kthread_create_lock irq_context: 0 cpu_hotplug_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &rq->__lock irq_context: 0 lock#2 irq_context: 0 khugepaged_mm_lock irq_context: 0 khugepaged_wait.lock irq_context: 0 cpu_hotplug_lock &x->wait irq_context: 0 cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &____s->seqcount irq_context: 0 cpu_hotplug_lock hrtimer_bases.lock irq_context: 0 cpu_hotplug_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_hotplug_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 bio_slab_lock irq_context: 0 bio_slab_lock fs_reclaim irq_context: 0 bio_slab_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 bio_slab_lock pool_lock#2 irq_context: 0 bio_slab_lock slab_mutex irq_context: 0 bio_slab_lock slab_mutex fs_reclaim irq_context: 0 bio_slab_lock slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 bio_slab_lock slab_mutex pool_lock#2 irq_context: 0 bio_slab_lock slab_mutex &c->lock irq_context: 0 bio_slab_lock slab_mutex &____s->seqcount irq_context: 0 bio_slab_lock slab_mutex pcpu_alloc_mutex irq_context: 0 bio_slab_lock slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 bio_slab_lock bio_slabs.xa_lock irq_context: 0 bio_slab_lock bio_slabs.xa_lock pool_lock#2 irq_context: 0 major_names_lock irq_context: 0 major_names_lock fs_reclaim irq_context: 0 major_names_lock fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 major_names_lock pool_lock#2 irq_context: 0 major_names_lock major_names_spinlock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) pool_lock#2 irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock irq_context: softirq (&pool->mayday_timer) &pool->lock irq_context: softirq (&pool->mayday_timer) &pool->lock wq_mayday_lock irq_context: softirq (&pool->mayday_timer) &obj_hash[i].lock irq_context: softirq (&pool->mayday_timer) &base->lock irq_context: softirq (&pool->mayday_timer) &base->lock &obj_hash[i].lock irq_context: softirq (&rtpcp->lazy_timer) irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &base->lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex (console_sem).lock irq_context: 0 &pgdat->kswapd_lock irq_context: softirq drivers/char/random.c:251 irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (next_reseed).work irq_context: 0 (wq_completion)events_unbound (next_reseed).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work &base->lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work input_pool.lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work base_crng.lock irq_context: softirq mm/vmstat.c:2022 irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pool->lock &base->lock irq_context: 0 &pool->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (shepherd).work irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events 
(shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (shepherd).work &obj_hash[i].lock irq_context: 0 (wq_completion)events (shepherd).work &base->lock irq_context: 0 (wq_completion)events (shepherd).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mm_percpu_wq irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &obj_hash[i].lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &base->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &base->lock &obj_hash[i].lock irq_context: 0 slab_mutex batched_entropy_u8.lock irq_context: 0 slab_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 slab_mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 slab_mutex kfence_freelist_lock irq_context: 0 console_lock fs_reclaim irq_context: 0 console_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 console_lock &x->wait#9 irq_context: 0 console_lock &k->list_lock irq_context: 0 console_lock gdp_mutex irq_context: 0 console_lock gdp_mutex &k->list_lock irq_context: 0 console_lock gdp_mutex fs_reclaim irq_context: 0 console_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 console_lock gdp_mutex pool_lock#2 irq_context: 0 console_lock gdp_mutex lock irq_context: 0 console_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 console_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 console_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 console_lock lock irq_context: 0 console_lock lock kernfs_idr_lock irq_context: 0 console_lock &root->kernfs_rwsem irq_context: 0 console_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 console_lock bus_type_sem irq_context: 0 console_lock sysfs_symlink_target_lock irq_context: 0 console_lock &root->kernfs_rwsem irq_context: 0 console_lock &dev->power.lock irq_context: 0 console_lock dpm_list_mtx irq_context: 0 console_lock uevent_sock_mutex irq_context: 0 console_lock 
running_helpers_waitq.lock irq_context: 0 console_lock subsys mutex#11 irq_context: 0 console_lock subsys mutex#11 &k->k_lock irq_context: 0 shrink_qlist.lock irq_context: 0 remove_cache_srcu_srcu_usage.lock irq_context: 0 remove_cache_srcu_srcu_usage.lock &obj_hash[i].lock irq_context: 0 &ACCESS_PRIVATE(sdp, lock) irq_context: 0 remove_cache_srcu irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex remove_cache_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) remove_cache_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#3 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex remove_cache_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock flush_lock irq_context: 0 cpu_hotplug_lock flush_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock flush_lock (work_completion)(&sfw->work) irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock (wq_completion)slub_flushwq irq_context: 0 cpu_hotplug_lock flush_lock &x->wait#10 irq_context: 0 cpu_hotplug_lock flush_lock &rq->__lock irq_context: 0 cpu_hotplug_lock flush_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)slub_flushwq irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) &c->lock irq_context: 0 (wq_completion)slub_flushwq 
(work_completion)(&sfw->work) &n->list_lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) &obj_hash[i].lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &base->lock &obj_hash[i].lock irq_context: softirq &(&ssp->srcu_sup->work)->timer irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &n->list_lock &c->lock irq_context: 0 clk_debug_lock irq_context: 0 clocks_mutex irq_context: 0 acpi_scan_lock irq_context: 0 acpi_scan_lock semaphore->lock irq_context: 0 acpi_scan_lock fs_reclaim irq_context: 0 acpi_scan_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock pool_lock#2 irq_context: 0 acpi_scan_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &x->wait#9 irq_context: 0 acpi_scan_lock acpi_device_lock irq_context: 0 acpi_scan_lock acpi_device_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_device_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_device_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_device_lock &xa->xa_lock#2 irq_context: 0 acpi_scan_lock acpi_device_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_device_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &k->list_lock irq_context: 0 acpi_scan_lock lock irq_context: 0 acpi_scan_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock bus_type_sem irq_context: 0 acpi_scan_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &k->k_lock irq_context: 0 acpi_scan_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &c->lock irq_context: 0 acpi_scan_lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->power.lock irq_context: 0 acpi_scan_lock dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock subsys mutex#12 irq_context: 0 acpi_scan_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock 
&pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_device_lock &c->lock irq_context: 0 acpi_scan_lock acpi_device_lock &____s->seqcount irq_context: 0 acpi_scan_lock &n->list_lock irq_context: 0 acpi_scan_lock &n->list_lock &c->lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_ioremap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_ioremap_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_ioremap_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_ioremap_lock &c->lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_ioremap_lock vmap_area_lock irq_context: 0 acpi_scan_lock quarantine_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock fs_reclaim irq_context: 0 acpi_scan_lock &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &device->physical_node_lock pool_lock#2 irq_context: 0 acpi_scan_lock &device->physical_node_lock lock irq_context: 0 acpi_scan_lock &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &device->physical_node_lock &c->lock irq_context: 0 acpi_scan_lock &device->physical_node_lock &____s->seqcount irq_context: 0 acpi_scan_lock irq_domain_mutex irq_context: 0 acpi_scan_lock &domain->mutex irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &c->lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &domain->mutex &irq_desc_lock_class irq_context: 0 acpi_scan_lock &domain->mutex fs_reclaim irq_context: 0 acpi_scan_lock &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &domain->mutex pool_lock#2 irq_context: 0 acpi_scan_lock resource_lock irq_context: 0 acpi_scan_lock &(&priv->bus_notifier)->rwsem irq_context: 0 acpi_scan_lock &(&priv->bus_notifier)->rwsem &device->physical_node_lock irq_context: 0 acpi_scan_lock fwnode_link_lock irq_context: 0 
acpi_scan_lock fwnode_link_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &device->physical_node_lock irq_context: 0 acpi_scan_lock &dev->mutex device_links_srcu irq_context: 0 acpi_scan_lock &dev->mutex fwnode_link_lock irq_context: 0 acpi_scan_lock &dev->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex &dev->devres_lock irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_maps_mutex irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 acpi_scan_lock &dev->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex 
acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_wakeup_lock irq_context: 0 acpi_scan_lock &dev->mutex resource_lock irq_context: 0 acpi_scan_lock &dev->mutex free_vmap_area_lock irq_context: 0 acpi_scan_lock &dev->mutex vmap_area_lock irq_context: 0 acpi_scan_lock &dev->mutex init_mm.page_table_lock irq_context: 0 acpi_scan_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &dev->mutex chrdevs_lock irq_context: 0 acpi_scan_lock &dev->mutex tty_mutex irq_context: 0 acpi_scan_lock &dev->mutex proc_subdir_lock irq_context: 0 acpi_scan_lock &dev->mutex proc_inum_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex proc_subdir_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex 
bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex device_links_srcu irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fwnode_link_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->devres_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex probe_waitqueue.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex subsys mutex#14 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &xa->xa_lock#3 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock 
&p->pi_lock &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)pm irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex (console_sem).lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex syslog_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex (console_sem).lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &rsp->gp_wait irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) pcpu_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: softirq &(&group->avgs_work)->timer irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
}))->timer rcu_read_lock &pool->lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq mm/memcontrol.c:679 irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work cgroup_rstat_lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &base->lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &base->lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex ctrl_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock sysfs_symlink_target_lock irq_context: 
0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex semaphore->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#15 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &n->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &n->list_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &k->k_lock klist_remove_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex 
&port->mutex &device->physical_node_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex req_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &x->wait#11 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers irq_context: 0 sb_writers mount_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sbinfo->stat_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &s->s_inode_list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &xattrs->lock 
irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &xattrs->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 &x->wait#11 irq_context: 0 &x->wait#11 &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#16 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#16 &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex chrdevs_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex fwnode_link_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex probe_waitqueue.lock irq_context: 0 acpi_scan_lock subsys mutex#4 irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock (console_sem).lock irq_context: 0 acpi_scan_lock 
console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock batched_entropy_u8.lock irq_context: 0 acpi_scan_lock batched_entropy_u8.lock crngs.lock irq_context: 0 acpi_scan_lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 acpi_scan_lock kfence_freelist_lock irq_context: 0 acpi_scan_lock &meta->lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock free_vmap_area_lock irq_context: 0 acpi_scan_lock vmap_area_lock irq_context: 0 acpi_scan_lock init_mm.page_table_lock irq_context: 0 acpi_scan_lock io_range_mutex irq_context: 0 acpi_scan_lock pci_bus_sem irq_context: 0 acpi_scan_lock gdp_mutex irq_context: 0 acpi_scan_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock gdp_mutex lock irq_context: 0 acpi_scan_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock subsys mutex#17 irq_context: 0 acpi_scan_lock subsys mutex#17 &k->k_lock irq_context: 0 acpi_scan_lock acpi_hp_context_lock irq_context: 0 acpi_scan_lock acpi_hp_context_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_hp_context_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_hp_context_lock pool_lock#2 irq_context: 0 acpi_scan_lock bridge_mutex irq_context: 0 acpi_scan_lock pci_bus_sem irq_context: 0 acpi_scan_lock pci_lock irq_context: 0 acpi_scan_lock pci_acpi_companion_lookup_sem irq_context: 0 acpi_scan_lock pci_slot_mutex irq_context: 0 acpi_scan_lock resource_alignment_lock irq_context: 0 acpi_scan_lock device_links_srcu irq_context: 0 acpi_scan_lock &dev->power.lock &dev->power.lock/1 irq_context: 0 acpi_scan_lock iort_msi_chip_lock irq_context: 0 acpi_scan_lock subsys mutex#18 irq_context: 0 acpi_scan_lock devtree_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock &c->lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock 
acpi_pm_notifier_lock &x->wait#9 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock acpi_link_lock irq_context: 0 acpi_scan_lock acpi_link_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_link_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_link_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_link_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock acpi_link_lock (console_sem).lock irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner_lock 
irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock acpi_link_lock &c->lock irq_context: 0 acpi_scan_lock acpi_link_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_dep_list_lock irq_context: 0 acpi_scan_lock power_resource_list_lock irq_context: 0 acpi_device_lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem pool_lock#2 irq_context: 0 klist_remove_lock irq_context: 0 &k->k_lock klist_remove_lock irq_context: 0 kernfs_idr_lock irq_context: 0 console_lock console_srcu console_owner_lock irq_context: 0 console_lock console_srcu console_owner irq_context: 0 console_lock console_srcu console_owner &port_lock_key irq_context: 0 console_lock console_srcu console_owner console_owner_lock irq_context: 0 k-sk_lock-AF_NETLINK irq_context: 0 k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 k-slock-AF_NETLINK irq_context: 0 cpu_hotplug_lock cpuhp_state-up fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state-up fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock kernfs_idr_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &root->kernfs_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state-up &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#10/1 irq_context: 0 &type->s_umount_key#10/1 fs_reclaim irq_context: 0 &type->s_umount_key#10/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#10/1 pool_lock#2 irq_context: 0 &type->s_umount_key#10/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#10/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#10/1 shrinker_mutex irq_context: 0 &type->s_umount_key#10/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#10/1 sb_lock irq_context: 0 &type->s_umount_key#10/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#10/1 &____s->seqcount irq_context: 0 &type->s_umount_key#10/1 &c->lock irq_context: 0 &type->s_umount_key#10/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#10/1 &sb->s_type->i_lock_key#9 irq_context: 0 &type->s_umount_key#10/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#10/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#10/1 &sb->s_type->i_lock_key#9 &dentry->d_lock irq_context: 0 &type->s_umount_key#10/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#11/1 irq_context: 0 &type->s_umount_key#11/1 fs_reclaim irq_context: 0 &type->s_umount_key#11/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#11/1 pool_lock#2 irq_context: 0 &type->s_umount_key#11/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#11/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#11/1 shrinker_mutex irq_context: 0 &type->s_umount_key#11/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#11/1 sb_lock irq_context: 0 &type->s_umount_key#11/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#11/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#11/1 &sb->s_type->i_lock_key#10 irq_context: 0 &type->s_umount_key#11/1 
&s->s_inode_list_lock irq_context: 0 &type->s_umount_key#11/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#11/1 &c->lock irq_context: 0 &type->s_umount_key#11/1 &____s->seqcount irq_context: 0 &type->s_umount_key#11/1 &sb->s_type->i_lock_key#10 &dentry->d_lock irq_context: 0 &type->s_umount_key#11/1 &dentry->d_lock irq_context: 0 &mm->mmap_lock irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start dma_fence_map irq_context: 0 &mm->mmap_lock irq_context: 0 key irq_context: 0 attribute_container_mutex irq_context: 0 triggers_list_lock irq_context: 0 leds_list_lock irq_context: 0 bus_type_sem irq_context: 0 (usb_notifier_list).rwsem irq_context: 0 &device->physical_node_lock irq_context: 0 rc_map_lock irq_context: 0 &root->kernfs_rwsem &____s->seqcount irq_context: 0 pci_lock irq_context: 0 subsys mutex#19 irq_context: 0 &(&priv->bus_notifier)->rwsem irq_context: 0 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 (efi_runtime_lock).lock irq_context: 0 &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) cpu_asid_lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) efi_rt_lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock irq_context: 0 (efivars_lock).lock irq_context: 0 devfreq_list_lock irq_context: 0 &entry->access irq_context: 0 info_mutex irq_context: 0 info_mutex proc_subdir_lock irq_context: 0 info_mutex fs_reclaim irq_context: 0 info_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 info_mutex &c->lock irq_context: 0 info_mutex &____s->seqcount irq_context: 0 info_mutex pool_lock#2 irq_context: 0 info_mutex proc_inum_ida.xa_lock irq_context: 0 info_mutex proc_subdir_lock irq_context: 0 kobj_ns_type_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem irq_context: 0 pernet_ops_rwsem rtnl_mutex sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex running_helpers_waitq.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock nl_table_lock irq_context: 0 rcu_read_lock nl_table_wait.lock irq_context: 0 qdisc_mod_lock irq_context: 0 bt_proto_lock irq_context: 0 hci_cb_list_lock irq_context: 0 mgmt_chan_list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 rate_ctrl_mutex irq_context: 0 rate_ctrl_mutex fs_reclaim irq_context: 0 rate_ctrl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rate_ctrl_mutex pool_lock#2 irq_context: 0 netlbl_domhsh_lock irq_context: 0 netlbl_unlhsh_lock irq_context: 0 rcu_read_lock netlbl_domhsh_lock irq_context: 0 rcu_read_lock netlbl_domhsh_lock pool_lock#2 irq_context: 0 misc_mtx irq_context: 0 misc_mtx fs_reclaim irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx pool_lock#2 irq_context: 0 misc_mtx 
&x->wait#9 irq_context: 0 misc_mtx &obj_hash[i].lock irq_context: 0 misc_mtx &____s->seqcount irq_context: 0 misc_mtx &k->list_lock irq_context: 0 misc_mtx gdp_mutex irq_context: 0 misc_mtx gdp_mutex &k->list_lock irq_context: 0 misc_mtx gdp_mutex fs_reclaim irq_context: 0 misc_mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx gdp_mutex pool_lock#2 irq_context: 0 misc_mtx gdp_mutex lock irq_context: 0 misc_mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx lock irq_context: 0 misc_mtx lock kernfs_idr_lock irq_context: 0 misc_mtx &root->kernfs_rwsem irq_context: 0 misc_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx bus_type_sem irq_context: 0 misc_mtx sysfs_symlink_target_lock irq_context: 0 misc_mtx &c->lock irq_context: 0 misc_mtx &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->power.lock irq_context: 0 misc_mtx dpm_list_mtx irq_context: 0 misc_mtx req_lock irq_context: 0 misc_mtx &p->pi_lock irq_context: 0 misc_mtx &x->wait#11 irq_context: 0 misc_mtx &rq->__lock irq_context: 0 misc_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx uevent_sock_mutex irq_context: 0 misc_mtx running_helpers_waitq.lock irq_context: 0 misc_mtx subsys mutex#21 irq_context: 0 misc_mtx subsys mutex#21 &k->k_lock irq_context: 0 input_mutex irq_context: 0 input_mutex input_devices_poll_wait.lock irq_context: 0 (netlink_chain).rwsem irq_context: 0 proto_tab_lock irq_context: 0 random_ready_notifier.lock irq_context: 0 random_ready_notifier.lock crngs.lock irq_context: 0 misc_mtx misc_minors_ida.xa_lock irq_context: 0 misc_mtx fs_reclaim &rq->__lock irq_context: 0 misc_mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex irq_context: 0 misc_mtx &obj_hash[i].lock pool_lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &rq->__lock &rt_rq->rt_runtime_lock irq_context: 0 wtd_deferred_reg_mutex irq_context: 0 &type->s_umount_key#12/1 irq_context: 0 &type->s_umount_key#12/1 fs_reclaim irq_context: 0 &type->s_umount_key#12/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#12/1 pool_lock#2 irq_context: 0 &type->s_umount_key#12/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#12/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#12/1 shrinker_mutex irq_context: 0 &type->s_umount_key#12/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#12/1 sb_lock irq_context: 0 &type->s_umount_key#12/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#12/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#12/1 &sb->s_type->i_lock_key#11 irq_context: 0 &type->s_umount_key#12/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#12/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#12/1 &sb->s_type->i_lock_key#11 &dentry->d_lock irq_context: 0 &type->s_umount_key#12/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#11 irq_context: 0 
clocksource_mutex cpu_hotplug_lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stopper->lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &rq->__lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &x->wait#8 irq_context: 0 clocksource_mutex (console_sem).lock irq_context: 0 clocksource_mutex console_lock console_srcu console_owner_lock irq_context: 0 clocksource_mutex console_lock console_srcu console_owner irq_context: 0 clocksource_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 clocksource_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 &type->s_umount_key#13/1 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#13/1 fs_reclaim irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pool_lock#2 irq_context: 0 &type->s_umount_key#13/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &type->s_umount_key#13/1 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &type->s_umount_key#13/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#13/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &type->s_umount_key#13/1 shrinker_mutex irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#13/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#26/1 proc_subdir_lock irq_context: 0 &type->s_umount_key#26/1 proc_inum_ida.xa_lock irq_context: 0 &type->s_umount_key#13/1 sb_lock irq_context: 0 &type->s_umount_key#26/1 proc_subdir_lock irq_context: 0 &type->s_umount_key#13/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#26/1 &journal->j_state_lock irq_context: 0 &type->s_umount_key#26/1 kthread_create_lock irq_context: 0 &type->s_umount_key#13/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#13/1 &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock irq_context: 0 &type->s_umount_key#26/1 &x->wait irq_context: 0 &type->s_umount_key#13/1 &sb->s_type->i_lock_key#12 irq_context: 0 &type->s_umount_key#13/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#26/1 &journal->j_wait_done_commit irq_context: 0 &type->s_umount_key#13/1 tk_core.seq.seqcount irq_context: 0 &journal->j_wait_done_commit irq_context: 0 &type->s_umount_key#13/1 &sb->s_type->i_lock_key#12 &dentry->d_lock irq_context: 0 &type->s_umount_key#13/1 &dentry->d_lock irq_context: 0 
&journal->j_wait_done_commit &p->pi_lock irq_context: 0 &journal->j_state_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_done_commit irq_context: 0 &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &type->s_umount_key#26/1 &journal->j_state_lock irq_context: 0 &type->s_umount_key#26/1 &p->alloc_lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock irq_context: 0 &sb->s_type->i_mutex_key#5 irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex irq_context: 0 &sb->s_type->i_mutex_key#5 &sb->s_type->i_lock_key#12 irq_context: 0 &sb->s_type->i_mutex_key#5 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 fs_reclaim irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#5 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#5 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 &dentry->d_lock &wq irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#5 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 &c->lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &sb->s_type->i_mutex_key#5 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#5 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#5 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#5 &sb->s_type->i_lock_key#12 &dentry->d_lock irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#26/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#26/1 wq_pool_mutex irq_context: 0 &type->s_umount_key#26/1 wq_pool_mutex &wq->mutex irq_context: 0 &type->s_umount_key#15/1 irq_context: 0 &type->s_umount_key#15/1 fs_reclaim irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &type->s_umount_key#15/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &ei->i_es_lock irq_context: 0 &type->s_umount_key#15/1 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex irq_context: 0 &type->s_umount_key#15/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex irq_context: 0 &type->s_umount_key#15/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex fs_reclaim irq_context: 0 &type->s_umount_key#15/1 shrinker_mutex irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &c->lock irq_context: 0 &type->s_umount_key#15/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#15/1 sb_lock irq_context: 0 &type->s_umount_key#15/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#26/1 
ext4_grpinfo_slab_create_mutex slab_mutex &____s->seqcount irq_context: 0 &type->s_umount_key#15/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pool_lock#2 irq_context: 0 &type->s_umount_key#15/1 &sb->s_type->i_lock_key#13 irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#15/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#15/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#15/1 &sb->s_type->i_lock_key#13 &dentry->d_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &k->list_lock irq_context: 0 &type->s_umount_key#15/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex lock kernfs_idr_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#26/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#26/1 ext4_li_mtx irq_context: 0 &type->s_umount_key#26/1 lock irq_context: 0 &type->s_umount_key#26/1 lock kernfs_idr_lock irq_context: 0 &type->s_umount_key#26/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#26/1 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#26/1 lock kernfs_idr_lock pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 (console_sem).lock irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner_lock irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#26/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 irq_context: 0 &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &type->s_umount_key#16/1 irq_context: 0 &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#16/1 fs_reclaim irq_context: 0 &type->s_umount_key#16/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &type->s_umount_key#16/1 &c->lock irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->s_umount_key#16/1 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &type->s_umount_key#16/1 pool_lock#2 irq_context: 0 &type->s_umount_key#16/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#16/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#16/1 shrinker_mutex irq_context: 0 &type->s_umount_key#16/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#16/1 sb_lock irq_context: 0 &type->s_umount_key#16/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#16/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem 
mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#16/1 &sb->s_type->i_lock_key#14 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &type->s_umount_key#16/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#16/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#16/1 &sb->s_type->i_lock_key#14 &dentry->d_lock irq_context: 0 &type->s_umount_key#16/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#17/1 irq_context: 0 &type->s_umount_key#17/1 fs_reclaim irq_context: 0 &type->s_umount_key#17/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#17/1 pool_lock#2 irq_context: 0 &type->s_umount_key#17/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#17/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#17/1 &c->lock irq_context: 0 &type->s_umount_key#17/1 &____s->seqcount irq_context: 0 &type->s_umount_key#17/1 shrinker_mutex irq_context: 0 &type->s_umount_key#17/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#17/1 sb_lock irq_context: 0 &type->s_umount_key#17/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#17/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#17/1 &sb->s_type->i_lock_key#15 irq_context: 0 &type->s_umount_key#17/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#17/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#17/1 &sb->s_type->i_lock_key#15 &dentry->d_lock irq_context: 0 &type->s_umount_key#17/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#15 irq_context: 0 kclist_lock irq_context: 0 kclist_lock resource_lock irq_context: 0 kclist_lock fs_reclaim irq_context: 0 kclist_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kclist_lock pool_lock#2 irq_context: 0 &type->s_umount_key#18/1 irq_context: 0 &type->s_umount_key#18/1 fs_reclaim irq_context: 0 &type->s_umount_key#18/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#18/1 pool_lock#2 irq_context: 0 &type->s_umount_key#18/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#18/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#18/1 shrinker_mutex irq_context: 0 &type->s_umount_key#18/1 &c->lock irq_context: 0 &type->s_umount_key#18/1 &____s->seqcount irq_context: 0 &type->s_umount_key#18/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#18/1 sb_lock irq_context: 0 &type->s_umount_key#18/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#18/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#18/1 &sb->s_type->i_lock_key#16 irq_context: 0 &type->s_umount_key#18/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#18/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#18/1 &sb->s_type->i_lock_key#16 &dentry->d_lock irq_context: 0 &type->s_umount_key#18/1 &dentry->d_lock irq_context: 0 misc_mtx &p->pi_lock &rq->__lock irq_context: 0 
misc_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq tick_broadcast_lock irq_context: hardirq tick_broadcast_lock jiffies_lock irq_context: 0 &sb->s_type->i_mutex_key#2 &c->lock irq_context: 0 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &____s->seqcount irq_context: 0 (wq_completion)events timer_update_work irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 tomoyo_ss irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss pool_lock#2 irq_context: 0 tomoyo_ss tomoyo_policy_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 tomoyo_ss (console_sem).lock irq_context: 0 tomoyo_ss console_lock console_srcu console_owner_lock irq_context: 0 tomoyo_ss console_lock console_srcu console_owner irq_context: 0 tomoyo_ss console_lock console_srcu console_owner &port_lock_key irq_context: 0 tomoyo_ss console_lock console_srcu console_owner console_owner_lock irq_context: hardirq &rt_b->rt_runtime_lock irq_context: hardirq &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: hardirq &rt_rq->rt_runtime_lock irq_context: 0 pnp_lock irq_context: 0 pnp_lock fs_reclaim irq_context: 0 pnp_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pnp_lock pool_lock#2 irq_context: 0 &device->physical_node_lock sysfs_symlink_target_lock irq_context: 0 &device->physical_node_lock fs_reclaim irq_context: 0 &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &device->physical_node_lock pool_lock#2 irq_context: 0 &device->physical_node_lock lock irq_context: 0 &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 fwnode_link_lock irq_context: 0 fwnode_link_lock &k->k_lock irq_context: 0 &dev->mutex device_links_srcu irq_context: 0 &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex device_links_lock irq_context: 0 &dev->mutex fs_reclaim irq_context: 0 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex lock irq_context: 0 &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex pnp_lock irq_context: 0 &dev->mutex resource_lock irq_context: 0 &dev->mutex (console_sem).lock irq_context: 0 &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex console_lock 
console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex fwnode_link_lock &k->k_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex probe_waitqueue.lock irq_context: 0 subsys mutex#22 irq_context: 0 &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock irq_context: 0 subsys mutex#23 irq_context: 0 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 subsys mutex#23 &k->k_lock irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 jiffies_seq.seqcount irq_context: 0 &x->wait#11 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 subsys mutex#16 irq_context: 0 subsys mutex#16 &k->k_lock irq_context: 0 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override &c->lock irq_context: 0 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 fill_pool_map-wait-type-override pool_lock irq_context: 0 subsys mutex#24 irq_context: 0 subsys mutex#24 &k->k_lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_callback pcpu_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &pcp->lock &zone->lock &____s->seqcount irq_context: softirq led_lock irq_context: 0 misc_mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 subsys mutex#25 irq_context: 0 subsys mutex#25 &k->list_lock irq_context: 0 subsys mutex#25 &k->k_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &xa->xa_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex kthread_create_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &x->wait irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock 
irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex wq_pool_attach_mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock &p->pi_lock irq_context: 0 netevent_notif_chain.lock irq_context: 0 clients_rwsem irq_context: 0 clients_rwsem fs_reclaim irq_context: 0 clients_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 clients_rwsem clients.xa_lock irq_context: 0 devices_rwsem irq_context: 0 clients_rwsem clients.xa_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex kfence_freelist_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &meta->lock irq_context: 0 (blocking_lsm_notifier_chain).rwsem irq_context: 0 (inetaddr_chain).rwsem irq_context: 0 inet6addr_chain.lock irq_context: 0 buses_mutex irq_context: 0 offload_lock irq_context: 0 inetsw_lock irq_context: 0 (wq_completion)events pcpu_balance_work irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex fs_reclaim irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex pool_lock#2 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex free_vmap_area_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex vmap_area_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &____s->seqcount irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex init_mm.page_table_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 ptype_lock irq_context: 0 (wq_completion)events_power_efficient irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work irq_context: 0 pernet_ops_rwsem &net->rules_mod_lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock fill_pool_map-wait-type-override 
pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem slab_mutex irq_context: 0 pernet_ops_rwsem slab_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem slab_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem slab_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock irq_context: 0 tcp_ulp_list_lock irq_context: 0 xfrm_state_afinfo_lock irq_context: 0 xfrm_policy_afinfo_lock irq_context: 0 xfrm_input_afinfo_lock irq_context: 0 pernet_ops_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock irq_context: 0 rtnl_mutex krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock &base->lock irq_context: 0 rtnl_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET/1 irq_context: 0 (wq_completion)events_highpri irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) fs_reclaim irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) &____s->seqcount irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) krc.lock irq_context: 0 &hashinfo->lock irq_context: 0 k-slock-AF_INET/1 irq_context: 0 tcp_cong_list_lock irq_context: 0 mptcp_sched_list_lock irq_context: 0 pernet_ops_rwsem cache_list_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) cache_list_lock irq_context: 0 pernet_ops_rwsem rcu_node_0 irq_context: 0 pernet_ops_rwsem &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (rpc_pipefs_notifier_list).rwsem irq_context: 0 svc_xprt_class_lock irq_context: 0 
xprt_list_lock irq_context: 0 xprt_list_lock (console_sem).lock irq_context: 0 xprt_list_lock console_lock console_srcu console_owner_lock irq_context: 0 xprt_list_lock console_lock console_srcu console_owner irq_context: 0 xprt_list_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 xprt_list_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 umhelper_sem irq_context: 0 umhelper_sem usermodehelper_disabled_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock init_fs.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 mount_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 
&sb->s_type->i_mutex_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_log_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_log_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &wb->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &wb->list_lock &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) pool_lock#2 irq_context: 0 
(wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_mm.page_table_lock irq_context: 0 &drv->dynids.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_files.file_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_fs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_fs.lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tasklist_lock &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) input_pool.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 umh_sysctl_lock irq_context: 0 async_done.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex rcu_read_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key irq_context: 0 &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock pool_lock#2 irq_context: 0 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &dentry->d_lock pool_lock#2 irq_context: 0 &tsk->futex_exit_mutex irq_context: 0 &tsk->futex_exit_mutex &p->pi_lock irq_context: 0 &p->alloc_lock &fs->lock irq_context: 0 &child->perf_event_mutex irq_context: 0 css_set_lock irq_context: 0 tasklist_lock irq_context: 0 tasklist_lock &pid->wait_pidfd irq_context: 0 tasklist_lock &sighand->siglock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit irq_context: 0 tasklist_lock &sighand->siglock input_pool.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pool_lock#2 irq_context: 0 tasklist_lock &obj_hash[i].lock irq_context: 0 &cfs_rq->removed.lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 
subsys mutex#26 irq_context: 0 &sb->s_type->i_mutex_key &c->lock irq_context: 0 &sb->s_type->i_mutex_key &____s->seqcount irq_context: 0 subsys mutex#27 irq_context: 0 subsys mutex#27 &k->list_lock irq_context: 0 subsys mutex#27 &k->k_lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &____s->seqcount irq_context: 0 subsys mutex#28 irq_context: 0 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &obj_hash[i].lock pool_lock irq_context: 0 pmus_lock fs_reclaim irq_context: 0 pmus_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pmus_lock &k->list_lock irq_context: 0 pmus_lock &c->lock irq_context: 0 pmus_lock &____s->seqcount irq_context: 0 pmus_lock lock irq_context: 0 pmus_lock lock kernfs_idr_lock irq_context: 0 pmus_lock &root->kernfs_rwsem irq_context: 0 pmus_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pmus_lock uevent_sock_mutex irq_context: 0 pmus_lock rcu_read_lock &pool->lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pmus_lock running_helpers_waitq.lock irq_context: 0 pmus_lock &rq->__lock irq_context: 0 pmus_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 pmus_lock &x->wait#9 irq_context: 0 pmus_lock bus_type_sem irq_context: 0 pmus_lock sysfs_symlink_target_lock irq_context: 0 pmus_lock &k->k_lock irq_context: 0 pmus_lock &root->kernfs_rwsem irq_context: 0 pmus_lock &dev->power.lock irq_context: 0 pmus_lock dpm_list_mtx irq_context: 0 pmus_lock &dev->mutex &k->list_lock irq_context: 0 pmus_lock &dev->mutex &k->k_lock irq_context: 0 pmus_lock &dev->mutex &dev->power.lock irq_context: 0 pmus_lock subsys mutex#29 irq_context: 0 pmus_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pmus_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pmus_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pmus_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 tasklist_lock 
&sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 key_user_lock irq_context: 0 key_serial_lock irq_context: 0 key_construction_mutex irq_context: 0 &type->lock_class irq_context: 0 &type->lock_class keyring_serialise_link_lock irq_context: 0 &type->lock_class keyring_serialise_link_lock fs_reclaim irq_context: 0 &type->lock_class keyring_serialise_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->lock_class keyring_serialise_link_lock pool_lock#2 irq_context: 0 &type->lock_class keyring_serialise_link_lock &c->lock irq_context: 0 &type->lock_class keyring_serialise_link_lock &____s->seqcount irq_context: 0 &type->lock_class keyring_serialise_link_lock &obj_hash[i].lock irq_context: 0 keyring_serialise_link_lock irq_context: 0 &pgdat->kswapd_lock fs_reclaim irq_context: 0 &pgdat->kswapd_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pgdat->kswapd_lock pool_lock#2 irq_context: 0 &pgdat->kswapd_lock kthread_create_lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pgdat->kswapd_lock &x->wait irq_context: 0 &pgdat->kswapd_lock &rq->__lock irq_context: 0 &pgdat->kswapd_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pgdat->kswapd_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pgdat->kswapd_lock &obj_hash[i].lock irq_context: 0 &pgdat->kswapd_wait irq_context: 0 list_lrus_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex pool_lock#2 irq_context: 0 drivers_lock irq_context: 0 damon_dbgfs_lock irq_context: 0 damon_dbgfs_lock fs_reclaim irq_context: 0 damon_dbgfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock pool_lock#2 irq_context: 0 damon_dbgfs_lock damon_ops_lock irq_context: 0 damon_dbgfs_lock pin_fs_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 misc_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#19/1 irq_context: 0 &type->s_umount_key#19/1 fs_reclaim irq_context: 0 &type->s_umount_key#19/1 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#19/1 pool_lock#2 irq_context: 0 &type->s_umount_key#19/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#19/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#19/1 shrinker_mutex irq_context: 0 &type->s_umount_key#19/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#19/1 sb_lock irq_context: 0 &type->s_umount_key#19/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#19/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#19/1 &sb->s_type->i_lock_key#17 irq_context: 0 &type->s_umount_key#19/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#19/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#19/1 &sb->s_type->i_lock_key#17 &dentry->d_lock irq_context: 0 &type->s_umount_key#19/1 &dentry->d_lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->s_umount_key#20/1 irq_context: 0 &type->s_umount_key#20/1 fs_reclaim irq_context: 0 &type->s_umount_key#20/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#20/1 pool_lock#2 irq_context: 0 &type->s_umount_key#20/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#20/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#20/1 shrinker_mutex irq_context: 0 &type->s_umount_key#20/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#20/1 sb_lock irq_context: 0 &type->s_umount_key#20/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#20/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#20/1 &sb->s_type->i_lock_key#18 irq_context: 0 &type->s_umount_key#20/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#20/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#20/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &type->s_umount_key#20/1 &dentry->d_lock irq_context: 0 configfs_subsystem_mutex irq_context: 0 &sb->s_type->i_mutex_key#6/1 irq_context: 0 &sb->s_type->i_mutex_key#6/1 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 irq_context: 0 slab_mutex rcu_read_lock &pool->lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 slab_mutex &rq->__lock irq_context: 0 &ecryptfs_kthread_ctl.wait irq_context: 0 ecryptfs_daemon_hash_mux irq_context: 0 ecryptfs_daemon_hash_mux fs_reclaim irq_context: 0 
ecryptfs_daemon_hash_mux fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ecryptfs_daemon_hash_mux pool_lock#2 irq_context: 0 ecryptfs_msg_ctx_lists_mux irq_context: 0 ecryptfs_msg_ctx_lists_mux &ecryptfs_msg_ctx_arr[i].mux irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_callback put_task_map-wait-type-override &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem &k->list_lock irq_context: 0 pernet_ops_rwsem lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem running_helpers_waitq.lock irq_context: 0 nfs_version_lock irq_context: 0 key_types_sem irq_context: 0 key_types_sem (console_sem).lock irq_context: 0 key_types_sem console_lock console_srcu console_owner_lock irq_context: 0 key_types_sem console_lock console_srcu console_owner irq_context: 0 key_types_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 key_types_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 pnfs_spinlock irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 pernet_ops_rwsem krc.lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 nls_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) irq_context: 0 (wq_completion)events (work_completion)(&p->wq) vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) pool_lock#2 irq_context: softirq rcu_callback &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &obj_hash[i].lock pool_lock irq_context: 0 jffs2_compressor_list_lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&cache_cleaner)->timer irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 misc_mtx rcu_read_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &meta->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 kfence_freelist_lock irq_context: 0 next_tag_value_lock irq_context: softirq (&tcp_orphan_timer) irq_context: softirq (&tcp_orphan_timer) &obj_hash[i].lock irq_context: softirq (&tcp_orphan_timer) 
&base->lock irq_context: softirq (&tcp_orphan_timer) &base->lock &obj_hash[i].lock irq_context: 0 log_redrive_lock irq_context: 0 &TxAnchor.LazyLock irq_context: 0 &TxAnchor.LazyLock jfs_commit_thread_wait.lock irq_context: 0 jfsTxnLock irq_context: 0 ocfs2_stack_lock irq_context: 0 ocfs2_stack_lock (console_sem).lock irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner_lock irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override pool_lock irq_context: 0 o2hb_callback_sem irq_context: 0 o2net_handler_lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 subsys mutex#30 irq_context: 0 subsys mutex#30 &k->k_lock irq_context: 0 &type->s_umount_key#21/1 irq_context: 0 &type->s_umount_key#21/1 fs_reclaim irq_context: 0 &type->s_umount_key#21/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#21/1 pool_lock#2 irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#21/1 shrinker_mutex irq_context: 0 &type->s_umount_key#21/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#21/1 sb_lock irq_context: 0 &type->s_umount_key#21/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#21/1 &____s->seqcount irq_context: 0 &type->s_umount_key#21/1 &c->lock irq_context: 0 &type->s_umount_key#21/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#21/1 &sb->s_type->i_lock_key#19 irq_context: 0 &type->s_umount_key#21/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#21/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#21/1 &sb->s_type->i_lock_key#19 &dentry->d_lock irq_context: 0 &type->s_umount_key#21/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#22/1 irq_context: 0 &type->s_umount_key#22/1 fs_reclaim irq_context: 0 &type->s_umount_key#22/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#22/1 pool_lock#2 irq_context: 0 &type->s_umount_key#22/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#22/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#22/1 shrinker_mutex irq_context: 0 &type->s_umount_key#22/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#22/1 &c->lock irq_context: 0 &type->s_umount_key#22/1 &____s->seqcount irq_context: 0 &type->s_umount_key#22/1 sb_lock irq_context: 0 &type->s_umount_key#22/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#22/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#22/1 &sb->s_type->i_lock_key#20 irq_context: 0 &type->s_umount_key#22/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#22/1 
tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#22/1 &sb->s_type->i_lock_key#20 &dentry->d_lock irq_context: 0 &type->s_umount_key#22/1 &dentry->d_lock irq_context: 0 cipso_v4_doi_list_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 crypto_alg_sem irq_context: 0 alg_types_sem irq_context: 0 alg_types_sem fs_reclaim irq_context: 0 alg_types_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 alg_types_sem pool_lock#2 irq_context: 0 dma_list_mutex irq_context: 0 asymmetric_key_parsers_sem irq_context: 0 asymmetric_key_parsers_sem (console_sem).lock irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner_lock irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 blkcg_pol_register_mutex irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex irq_context: 0 blkcg_pol_register_mutex cgroup_mutex irq_context: 0 blkcg_pol_register_mutex cgroup_mutex &root->kernfs_rwsem irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex fs_reclaim irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex pool_lock#2 irq_context: 0 blkcg_pol_register_mutex cgroup_mutex fs_reclaim irq_context: 0 blkcg_pol_register_mutex cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 blkcg_pol_register_mutex cgroup_mutex pool_lock#2 irq_context: 0 blkcg_pol_register_mutex cgroup_mutex lock irq_context: 0 blkcg_pol_register_mutex cgroup_mutex lock kernfs_idr_lock irq_context: 0 blkcg_pol_register_mutex cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 elv_list_lock irq_context: 0 crc_t10dif_mutex irq_context: 0 crc_t10dif_mutex crypto_alg_sem irq_context: 0 crc_t10dif_mutex fs_reclaim irq_context: 0 crc_t10dif_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crc_t10dif_mutex pool_lock#2 irq_context: 0 crc64_rocksoft_mutex irq_context: 0 crc64_rocksoft_mutex crypto_alg_sem irq_context: 0 crc64_rocksoft_mutex fs_reclaim irq_context: 0 crc64_rocksoft_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crc64_rocksoft_mutex pool_lock#2 irq_context: 0 ts_mod_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&sub_info->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &cfs_rq->removed.lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &rq->__lock irq_context: 0 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 uevent_sock_mutex &rq->__lock irq_context: 0 uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 uevent_sock_mutex pool_lock#2 irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &cfs_rq->removed.lock irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) quarantine_lock irq_context: 0 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 pci_ep_cfs_subsys.su_mutex irq_context: 0 &default_group_class[depth - 1]#2/1 irq_context: 0 &default_group_class[depth - 1]#2/1 fs_reclaim irq_context: 0 &default_group_class[depth - 1]#2/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &default_group_class[depth - 1]#2/1 pool_lock#2 irq_context: 0 &default_group_class[depth - 1]#2/1 &dentry->d_lock irq_context: 0 &default_group_class[depth - 1]#2/1 configfs_dirent_lock irq_context: 0 &default_group_class[depth - 1]#2/1 mmu_notifier_invalidate_range_start irq_context: 0 &default_group_class[depth - 1]#2/1 &sb->s_type->i_lock_key#18 irq_context: 0 &default_group_class[depth - 1]#2/1 &s->s_inode_list_lock irq_context: 0 &default_group_class[depth - 1]#2/1 tk_core.seq.seqcount irq_context: 0 &default_group_class[depth - 1]#2/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &default_group_class[depth - 1]#2/1 
&sb->s_type->i_mutex_key#7/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_mutex_key#7/2 irq_context: 0 pci_epf_mutex irq_context: 0 ipmi_interfaces_mutex irq_context: 0 ipmi_interfaces_mutex &k->list_lock irq_context: 0 ipmi_interfaces_mutex fs_reclaim irq_context: 0 ipmi_interfaces_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ipmi_interfaces_mutex &c->lock irq_context: 0 ipmi_interfaces_mutex &____s->seqcount irq_context: 0 ipmi_interfaces_mutex pool_lock#2 irq_context: 0 ipmi_interfaces_mutex lock irq_context: 0 ipmi_interfaces_mutex lock kernfs_idr_lock irq_context: 0 ipmi_interfaces_mutex &root->kernfs_rwsem irq_context: 0 ipmi_interfaces_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 ipmi_interfaces_mutex &k->k_lock irq_context: 0 ipmi_interfaces_mutex uevent_sock_mutex irq_context: 0 ipmi_interfaces_mutex &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 ipmi_interfaces_mutex running_helpers_waitq.lock irq_context: 0 ipmi_interfaces_mutex pcpu_alloc_mutex irq_context: 0 ipmi_interfaces_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex kthread_create_lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock &rq->__lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ipmi_interfaces_mutex &rq->__lock irq_context: 0 ipmi_interfaces_mutex &x->wait irq_context: 0 ipmi_interfaces_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &rq->__lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback put_task_map-wait-type-override &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback put_task_map-wait-type-override quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 ipmi_interfaces_mutex wq_pool_mutex irq_context: 0 ipmi_interfaces_mutex wq_pool_mutex &wq->mutex irq_context: 0 ipmi_interfaces_mutex 
&base->lock irq_context: 0 ipmi_interfaces_mutex &base->lock &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex panic_notifier_list.lock irq_context: 0 smi_watchers_mutex irq_context: 0 smi_watchers_mutex &ipmi_interfaces_srcu irq_context: 0 smi_infos_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex &device->physical_node_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &x->wait#9 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock 
acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 &dev->mutex acpi_wakeup_lock irq_context: 0 &dev->mutex semaphore->lock irq_context: 0 &dev->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex irq_domain_mutex irq_context: 0 &dev->mutex &domain->mutex irq_context: 0 &dev->mutex kthread_create_lock irq_context: 0 &dev->mutex &p->pi_lock irq_context: 0 &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &x->wait irq_context: 0 &dev->mutex &rq->__lock irq_context: 0 &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &desc->request_mutex irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &desc->wait_for_threads irq_context: 0 &desc->wait_for_threads irq_context: 0 &desc->wait_for_threads &p->pi_lock irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 &dev->mutex register_lock irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: 0 &dev->mutex register_lock proc_subdir_lock irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock irq_context: 0 &dev->mutex register_lock fs_reclaim irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &dev->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_lock pool_lock#2 irq_context: 0 &p->pi_lock &rq->__lock &rt_rq->rt_runtime_lock irq_context: 0 &dev->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 &dev->mutex register_lock proc_subdir_lock irq_context: 0 &x->wait#7 irq_context: 0 &dev->mutex register_lock &c->lock irq_context: 0 &dev->mutex register_lock &rq->__lock irq_context: 0 &dev->mutex register_lock &____s->seqcount irq_context: softirq rcu_callback &base->lock irq_context: softirq rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex 
proc_subdir_lock irq_context: 0 &dev->mutex proc_inum_ida.xa_lock irq_context: 0 &dev->mutex proc_subdir_lock irq_context: 0 &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex &c->lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &x->wait#9 irq_context: 0 &dev->mutex gdp_mutex irq_context: 0 &dev->mutex gdp_mutex &k->list_lock irq_context: 0 &dev->mutex gdp_mutex fs_reclaim irq_context: 0 &dev->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex gdp_mutex lock irq_context: 0 &dev->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex bus_type_sem irq_context: 0 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex dpm_list_mtx irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex subsys mutex#31 irq_context: 0 &dev->mutex subsys mutex#31 &k->k_lock irq_context: 0 &dev->mutex input_mutex irq_context: 0 &dev->mutex input_mutex fs_reclaim irq_context: 0 &dev->mutex input_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex input_mutex pool_lock#2 irq_context: 0 &dev->mutex input_mutex &dev->mutex#2 irq_context: 0 &dev->mutex input_mutex input_devices_poll_wait.lock irq_context: 0 &dev->mutex wakeup_ida.xa_lock irq_context: 0 &dev->mutex subsys mutex#13 irq_context: 0 &dev->mutex subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex events_lock irq_context: 0 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 register_count_mutex irq_context: 0 register_count_mutex &k->list_lock irq_context: 0 register_count_mutex fs_reclaim irq_context: 0 register_count_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_count_mutex pool_lock#2 irq_context: 0 register_count_mutex lock irq_context: 0 register_count_mutex lock kernfs_idr_lock irq_context: 0 register_count_mutex &root->kernfs_rwsem irq_context: 0 register_count_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_count_mutex &k->k_lock irq_context: 0 register_count_mutex uevent_sock_mutex irq_context: 0 register_count_mutex &obj_hash[i].lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_count_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex cpu_add_remove_lock irq_context: 0 &dev->mutex thermal_cdev_ida.xa_lock irq_context: 0 &dev->mutex cpufreq_driver_lock irq_context: 0 &dev->mutex subsys mutex#32 irq_context: 0 &dev->mutex subsys mutex#32 &k->k_lock irq_context: 0 &dev->mutex thermal_list_lock irq_context: 0 &dev->mutex 
&pcp->lock &zone->lock irq_context: 0 &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) running_helpers_waitq.lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 scmi_requested_devices_mtx irq_context: 0 scmi_requested_devices_mtx fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 scmi_requested_devices_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock pool_lock#2 irq_context: 0 scmi_requested_devices_mtx pool_lock#2 irq_context: 0 rcu_read_lock rcu_node_0 irq_context: 0 tasklist_lock quarantine_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback &meta->lock irq_context: softirq rcu_callback kfence_freelist_lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key &rq->__lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback quarantine_lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex 
iommu_probe_device_lock irq_context: 0 &dev->mutex acpi_link_lock irq_context: 0 &dev->mutex acpi_link_lock fs_reclaim irq_context: 0 &dev->mutex acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_link_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_link_lock semaphore->lock irq_context: 0 &dev->mutex acpi_link_lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex acpi_link_lock &c->lock irq_context: 0 &dev->mutex acpi_link_lock &____s->seqcount irq_context: 0 &dev->mutex acpi_link_lock (console_sem).lock irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex acpi_link_lock &rq->__lock irq_context: 0 &dev->mutex acpi_link_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &drv->dynids.lock irq_context: 0 &dev->mutex pci_lock irq_context: 0 &dev->mutex virtio_index_ida.xa_lock irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex &dev->mutex &k->list_lock irq_context: 0 &dev->mutex &dev->mutex &k->k_lock irq_context: 0 &dev->mutex subsys mutex#33 irq_context: 0 vdpa_dev_lock irq_context: 0 &type->i_mutex_dir_key#2 irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#2 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock &wq irq_context: 0 subsys mutex#34 irq_context: 0 subsys mutex#34 &k->k_lock irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq lib/debugobjects.c:101 irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock 
rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_callback put_task_map-wait-type-override &base->lock irq_context: softirq rcu_callback put_task_map-wait-type-override &base->lock &obj_hash[i].lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock irq_context: 0 port_mutex irq_context: 0 port_mutex fs_reclaim irq_context: 0 port_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex pool_lock#2 irq_context: 0 port_mutex &x->wait#9 irq_context: 0 port_mutex &obj_hash[i].lock irq_context: 0 port_mutex &k->list_lock irq_context: 0 port_mutex lock irq_context: 0 port_mutex lock kernfs_idr_lock irq_context: 0 port_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex bus_type_sem irq_context: 0 port_mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &c->lock irq_context: 0 port_mutex &____s->seqcount irq_context: 0 port_mutex &k->k_lock irq_context: 0 port_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &dev->power.lock irq_context: 0 port_mutex dpm_list_mtx irq_context: 0 port_mutex uevent_sock_mutex irq_context: 0 port_mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 port_mutex running_helpers_waitq.lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock irq_context: 0 port_mutex &dev->mutex &k->list_lock irq_context: 0 port_mutex &dev->mutex &k->k_lock irq_context: 0 port_mutex &dev->mutex device_links_srcu irq_context: 0 port_mutex &dev->mutex fwnode_link_lock irq_context: 0 port_mutex &dev->mutex device_links_lock irq_context: 0 port_mutex &dev->mutex fs_reclaim irq_context: 0 port_mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &dev->mutex pool_lock#2 irq_context: 0 port_mutex &dev->mutex &dev->devres_lock irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex irq_context: 0 port_mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 port_mutex &dev->mutex &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &dev->mutex lock irq_context: 0 port_mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 port_mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 
port_mutex &dev->mutex deferred_probe_mutex irq_context: 0 port_mutex &dev->mutex &c->lock irq_context: 0 port_mutex &dev->mutex &pcp->lock &zone->lock irq_context: 0 port_mutex &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 port_mutex &dev->mutex &____s->seqcount irq_context: 0 port_mutex &dev->mutex uevent_sock_mutex irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 port_mutex &dev->mutex probe_waitqueue.lock irq_context: 0 port_mutex subsys mutex#14 irq_context: 0 port_mutex &xa->xa_lock#3 irq_context: 0 port_mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex irq_context: 0 port_mutex &port->mutex fs_reclaim irq_context: 0 port_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex pool_lock#2 irq_context: 0 port_mutex &port->mutex console_mutex irq_context: 0 port_mutex &port->mutex ctrl_ida.xa_lock irq_context: 0 port_mutex &port->mutex &x->wait#9 irq_context: 0 port_mutex &port->mutex &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex &dev->power.lock irq_context: 0 port_mutex &port->mutex &c->lock irq_context: 0 port_mutex &port->mutex &____s->seqcount irq_context: 0 port_mutex &port->mutex &k->list_lock irq_context: 0 port_mutex &port->mutex lock irq_context: 0 port_mutex &port->mutex lock kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex &port->mutex bus_type_sem irq_context: 0 port_mutex &port->mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &port->mutex &k->k_lock irq_context: 0 port_mutex &port->mutex dpm_list_mtx irq_context: 0 port_mutex &port->mutex uevent_sock_mutex irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &port->mutex running_helpers_waitq.lock irq_context: 0 port_mutex &port->mutex &pcp->lock &zone->lock irq_context: 0 port_mutex &port->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 port_mutex &port->mutex &dev->mutex &k->list_lock irq_context: 0 port_mutex &port->mutex &dev->mutex &k->k_lock irq_context: 0 port_mutex &port->mutex 
&dev->mutex &dev->power.lock hrtimer_bases.lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex subsys mutex#15 irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 port_mutex &port->mutex kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &k->k_lock klist_remove_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 port_mutex &port->mutex deferred_probe_mutex irq_context: 0 port_mutex &port->mutex device_links_lock irq_context: 0 port_mutex &port->mutex mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex gdp_mutex irq_context: 0 port_mutex &port->mutex gdp_mutex &k->list_lock irq_context: 0 port_mutex &port->mutex gdp_mutex fs_reclaim irq_context: 0 port_mutex &port->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex gdp_mutex &c->lock irq_context: 0 port_mutex &port->mutex gdp_mutex &____s->seqcount irq_context: 0 port_mutex &port->mutex gdp_mutex pool_lock#2 irq_context: 0 port_mutex &port->mutex gdp_mutex lock irq_context: 0 port_mutex &port->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex &port->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 port_mutex &port->mutex req_lock irq_context: 0 port_mutex &port->mutex &p->pi_lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex &x->wait#11 irq_context: 0 port_mutex &port->mutex &rq->__lock irq_context: 0 port_mutex &port->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex subsys mutex#16 irq_context: 0 port_mutex &port->mutex subsys mutex#16 &k->k_lock irq_context: 0 port_mutex &port->mutex chrdevs_lock irq_context: 0 port_mutex &port->mutex &cfs_rq->removed.lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 port_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 port_mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (work_completion)(&buf->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &n->list_lock irq_context: 0 &dev->mutex rng_index_ida.xa_lock irq_context: 0 
&dev->mutex &md->mutex irq_context: 0 &dev->mutex free_vmap_area_lock irq_context: 0 &dev->mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &dev->mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &dev->mutex vmap_area_lock irq_context: 0 &dev->mutex &md->mutex pci_lock irq_context: 0 &dev->mutex &md->mutex fs_reclaim irq_context: 0 &dev->mutex &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 irq_context: 0 &dev->mutex &md->mutex &rq->__lock irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 &c->lock irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &zone->lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &c->lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex fs_reclaim irq_context: 0 &dev->mutex &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &domain->mutex pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &md->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &md->mutex tmpmask_lock irq_context: 0 &dev->mutex &md->mutex &its->lock irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex lock irq_context: 0 &dev->mutex &md->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock 
tmp_mask_lock tmpmask_lock irq_context: 0 &dev->mutex &zone->lock irq_context: 0 &dev->mutex &zone->lock &____s->seqcount irq_context: 0 &dev->mutex &dev->vqs_list_lock irq_context: 0 &dev->mutex &vp_dev->lock irq_context: 0 &dev->mutex rng_mutex irq_context: 0 &dev->mutex rng_mutex &x->wait#13 irq_context: 0 &dev->mutex rng_mutex fs_reclaim irq_context: 0 &dev->mutex rng_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rng_mutex pool_lock#2 irq_context: 0 &dev->mutex rng_mutex kthread_create_lock irq_context: 0 &dev->mutex rng_mutex &p->pi_lock irq_context: 0 &dev->mutex rng_mutex &x->wait irq_context: 0 &dev->mutex rng_mutex &rq->__lock irq_context: 0 &dev->mutex rng_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock pidmap_lock &c->lock irq_context: 0 lock pidmap_lock &____s->seqcount irq_context: hardirq &x->wait#14 irq_context: 0 &dev->mutex rng_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rng_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rng_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rng_mutex irq_context: 0 rng_mutex rng_mutex.wait_lock irq_context: 0 rng_mutex &rq->__lock irq_context: 0 rng_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rng_mutex.wait_lock irq_context: 0 reading_mutex irq_context: 0 &dev->mutex reading_mutex irq_context: 0 &dev->mutex input_pool.lock irq_context: 0 &dev->mutex &dev->config_lock irq_context: softirq drivers/char/random.c:1010 irq_context: softirq drivers/char/random.c:1010 input_pool.lock irq_context: 0 misc_mtx &cfs_rq->removed.lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 &dev->devres_lock irq_context: 0 &dev->managed.lock irq_context: 0 &type->s_umount_key#23/1 irq_context: 0 &type->s_umount_key#23/1 fs_reclaim irq_context: 0 &type->s_umount_key#23/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#23/1 pool_lock#2 irq_context: 0 &type->s_umount_key#23/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#23/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#23/1 shrinker_mutex irq_context: 0 &type->s_umount_key#23/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#23/1 sb_lock irq_context: 0 &type->s_umount_key#23/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#23/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#23/1 &sb->s_type->i_lock_key#21 irq_context: 0 &type->s_umount_key#23/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#23/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#23/1 &sb->s_type->i_lock_key#21 &dentry->d_lock irq_context: 0 &type->s_umount_key#23/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#21 irq_context: 0 lock drm_minor_lock irq_context: 0 lock drm_minor_lock pool_lock#2 irq_context: 0 stack_depot_init_mutex irq_context: 0 subsys mutex#35 irq_context: 0 subsys mutex#35 &k->k_lock irq_context: 0 drm_minor_lock irq_context: 0 &dev->mode_config.idr_mutex irq_context: 0 &dev->mode_config.idr_mutex fs_reclaim irq_context: 0 &dev->mode_config.idr_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 &dev->mode_config.idr_mutex pool_lock#2 irq_context: 0 crtc_ww_class_acquire irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_acquire irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_acquire reservation_ww_class_mutex irq_context: 0 &dev->mode_config.blob_lock irq_context: 0 &xa->xa_lock#7 irq_context: 0 &xa->xa_lock#8 irq_context: 0 &dev->mode_config.connector_list_lock irq_context: 0 &dev->vbl_lock irq_context: 0 drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 drm_connector_list_iter fs_reclaim irq_context: 0 drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex irq_context: 0 drm_connector_list_iter &connector->mutex fs_reclaim irq_context: 0 drm_connector_list_iter &connector->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter &connector->mutex pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex &x->wait#9 irq_context: 0 drm_connector_list_iter &connector->mutex &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &connector->mutex &k->list_lock irq_context: 0 drm_connector_list_iter &connector->mutex lock irq_context: 0 drm_connector_list_iter &connector->mutex lock kernfs_idr_lock irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex bus_type_sem irq_context: 0 drm_connector_list_iter &connector->mutex sysfs_symlink_target_lock irq_context: 0 drm_connector_list_iter &connector->mutex &____s->seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &c->lock irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex &dev->power.lock irq_context: 0 drm_connector_list_iter &connector->mutex dpm_list_mtx irq_context: 0 drm_connector_list_iter &connector->mutex uevent_sock_mutex irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 drm_connector_list_iter &connector->mutex running_helpers_waitq.lock irq_context: 0 drm_connector_list_iter &connector->mutex &base->lock irq_context: 0 drm_connector_list_iter &connector->mutex &base->lock &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &connector->mutex &k->k_lock irq_context: 0 drm_connector_list_iter &connector->mutex subsys mutex#35 irq_context: 0 drm_connector_list_iter &connector->mutex subsys mutex#35 &k->k_lock irq_context: 0 drm_connector_list_iter &connector->mutex pin_fs_lock irq_context: 
0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 drm_connector_list_iter &connector->mutex &dev->mode_config.idr_mutex irq_context: 0 drm_connector_list_iter &connector->mutex connector_list_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &dev->filelist_mutex irq_context: 0 &dev->clientlist_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex 
crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &sbinfo->stat_lock irq_context: 0 &dev->clientlist_mutex &helper->lock mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &sb->s_type->i_lock_key irq_context: 0 &dev->clientlist_mutex &helper->lock &s->s_inode_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock batched_entropy_u32.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &mgr->vm_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &mgr->vm_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock &file_private->table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock &file_private->table_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->mode_config.fb_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file->fbs_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &prime_fpriv->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock irq_context: 0 &dev->clientlist_mutex &helper->lock vmap_area_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock init_mm.page_table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock pool_lock#2 irq_context: 0 
&dev->clientlist_mutex registration_lock irq_context: 0 &dev->clientlist_mutex registration_lock fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock &pcp->lock &zone->lock irq_context: 0 &dev->clientlist_mutex registration_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock &x->wait#9 irq_context: 0 &dev->clientlist_mutex registration_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock lock irq_context: 0 &dev->clientlist_mutex registration_lock lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock bus_type_sem irq_context: 0 &dev->clientlist_mutex registration_lock sysfs_symlink_target_lock irq_context: 0 &dev->clientlist_mutex registration_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock &dev->power.lock irq_context: 0 &dev->clientlist_mutex registration_lock dpm_list_mtx irq_context: 0 &dev->clientlist_mutex registration_lock req_lock irq_context: 0 &dev->clientlist_mutex registration_lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock &x->wait#11 irq_context: 0 &dev->clientlist_mutex registration_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock uevent_sock_mutex irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock running_helpers_waitq.lock irq_context: 0 &dev->clientlist_mutex registration_lock &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock subsys mutex#11 irq_context: 0 &dev->clientlist_mutex registration_lock subsys mutex#11 &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex irq_context: 
0 &dev->clientlist_mutex registration_lock vt_switch_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock (console_sem).lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &fb_info->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock vt_event_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &base->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &x->wait#9 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock gdp_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock gdp_mutex &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock bus_type_sem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock sysfs_symlink_target_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &dev->power.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock dpm_list_mtx irq_context: 0 &dev->clientlist_mutex registration_lock console_lock uevent_sock_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock running_helpers_waitq.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock subsys mutex#6 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock subsys mutex#6 &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex irq_context: 0 
&dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.blob_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &crtc->commit_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex 
crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &sb->s_type->i_lock_key irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &info->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex lock#4 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex lock#4 &lruvec->lru_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &pcp->lock &zone->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 &____s->seqcount irq_context: 0 pool_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex free_vmap_area_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex vmap_area_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex init_mm.page_table_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock irq_context: 0 
&dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &____s->seqcount#5 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &x->wait#15 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &x->wait#15 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->damage_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock 
&dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->damage_lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &lock->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &vkms_out->lock irq_context: hardirq &vkms_out->lock &dev->event_lock irq_context: hardirq &vkms_out->lock 
&dev->event_lock &dev->vblank_time_lock irq_context: hardirq &vkms_out->lock &dev->event_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: hardirq &vkms_out->lock &dev->event_lock &vblank->queue irq_context: hardirq &vkms_out->lock &dev->event_lock &____s->seqcount#5 irq_context: hardirq &vkms_out->lock &dev->event_lock &obj_hash[i].lock irq_context: hardirq &vkms_out->lock &dev->event_lock &base->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &base->lock &obj_hash[i].lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &rq->__lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &vkms_out->lock &dev->event_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (&timer.timer) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work)#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &lock->wait_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock (console_sem).lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_owner_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &rq->__lock irq_context: 0 (wq_completion)events 
(work_completion)(&helper->damage_work) &helper->lock reservation_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->clientlist_mutex (console_sem).lock irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->clientlist_mutex kernel_fb_helper_lock irq_context: 0 drivers_lock#2 irq_context: 0 devices_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex lock kernfs_idr_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &root->kernfs_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &c->lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up &x->wait#9 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &k->list_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up bus_type_sem irq_context: 0 cpu_hotplug_lock cpuhp_state-up &k->k_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock kernfs_idr_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock irq_context: 0 blk_queue_ida.xa_lock irq_context: 0 &sb->s_type->i_lock_key#3 irq_context: 0 &xa->xa_lock#10 irq_context: 0 lock &q->queue_lock irq_context: 0 lock &q->queue_lock &blkcg->lock irq_context: 0 &q->queue_lock irq_context: 0 &q->queue_lock pool_lock#2 irq_context: 0 &q->queue_lock pcpu_lock irq_context: 0 &q->queue_lock &obj_hash[i].lock irq_context: 0 &q->queue_lock percpu_counters_lock irq_context: 0 &q->queue_lock &blkcg->lock irq_context: 0 &bdev->bd_size_lock irq_context: 0 subsys mutex#36 irq_context: 0 subsys mutex#36 &k->k_lock irq_context: 0 dev_hotplug_mutex irq_context: 0 dev_hotplug_mutex &dev->power.lock irq_context: 0 &q->sysfs_dir_lock irq_context: 0 &q->sysfs_dir_lock fs_reclaim irq_context: 0 &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock lock irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &q->sysfs_dir_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 &q->sysfs_dir_lock 
&q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 percpu_ref_switch_lock irq_context: 0 subsys mutex#37 irq_context: 0 subsys mutex#37 &k->k_lock irq_context: 0 cgwb_lock irq_context: 0 bdi_lock irq_context: 0 inode_hash_lock irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 bdev_lock irq_context: 0 &disk->open_mutex irq_context: 0 &disk->open_mutex fs_reclaim irq_context: 0 &disk->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex pool_lock#2 irq_context: 0 &disk->open_mutex free_vmap_area_lock irq_context: 0 &disk->open_mutex vmap_area_lock irq_context: 0 &disk->open_mutex &____s->seqcount irq_context: 0 &disk->open_mutex init_mm.page_table_lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 irq_context: 0 &disk->open_mutex lock#4 irq_context: 0 &disk->open_mutex mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex &c->lock irq_context: 0 &disk->open_mutex &mapping->i_private_lock irq_context: 0 &disk->open_mutex tk_core.seq.seqcount irq_context: 0 &disk->open_mutex &ret->b_uptodate_lock irq_context: 0 &disk->open_mutex &obj_hash[i].lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 pool_lock#2 irq_context: 0 &disk->open_mutex purge_vmap_area_lock irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &disk->open_mutex lock#4 &lruvec->lru_lock irq_context: 0 &disk->open_mutex lock#5 irq_context: 0 &disk->open_mutex &lruvec->lru_lock irq_context: 0 lock &q->queue_lock &blkcg->lock pool_lock#2 irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 
irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &disk->open_mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex purge_vmap_area_lock pool_lock#2 irq_context: 0 &q->queue_lock &c->lock irq_context: 0 &q->queue_lock &____s->seqcount irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &c->lock irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &disk->open_mutex &xa->xa_lock#9 &c->lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 fs_reclaim &rq->__lock irq_context: 0 &q->queue_lock &obj_hash[i].lock pool_lock irq_context: 0 loop_ctl_mutex irq_context: 0 loop_ctl_mutex fs_reclaim irq_context: 0 loop_ctl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 loop_ctl_mutex pool_lock#2 irq_context: 0 &q->sysfs_lock irq_context: 0 &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_lock pool_lock#2 irq_context: 0 &q->sysfs_lock &obj_hash[i].lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 &q->sysfs_lock fs_reclaim irq_context: 0 &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 &set->tag_list_lock irq_context: 0 &q->mq_freeze_lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock 
&q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 &q->sysfs_lock &c->lock irq_context: 0 &q->sysfs_lock &____s->seqcount irq_context: 0 pcpu_alloc_mutex &rq->__lock irq_context: 0 pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 pcpu_alloc_mutex pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &____s->seqcount irq_context: 0 &q->sysfs_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->sysfs_lock &rq->__lock irq_context: 0 &q->sysfs_lock &cfs_rq->removed.lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex &rq->__lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex &cfs_rq->removed.lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex &obj_hash[i].lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex &obj_hash[i].lock pool_lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nbd_index_mutex irq_context: 0 nbd_index_mutex fs_reclaim irq_context: 0 nbd_index_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nbd_index_mutex pool_lock#2 irq_context: 0 set->srcu irq_context: 0 (work_completion)(&(&q->requeue_work)->work) irq_context: 0 (work_completion)(&(&hctx->run_work)->work) irq_context: 0 &q->debugfs_mutex irq_context: 0 
&q->sysfs_dir_lock &q->sysfs_lock lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex set->srcu irq_context: 0 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock &c->lock irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock &____s->seqcount irq_context: 0 &q->queue_lock &pcp->lock &zone->lock irq_context: 0 &q->queue_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &pcp->lock &zone->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&ops->cursor_work)->timer irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) (console_sem).lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock &helper->damage_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &base->lock 
&obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock pool_lock irq_context: 0 zram_index_mutex irq_context: 0 zram_index_mutex fs_reclaim irq_context: 0 zram_index_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex pool_lock#2 irq_context: 0 zram_index_mutex blk_queue_ida.xa_lock irq_context: 0 zram_index_mutex &obj_hash[i].lock irq_context: 0 zram_index_mutex pcpu_alloc_mutex irq_context: 0 zram_index_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 zram_index_mutex bio_slab_lock irq_context: 0 zram_index_mutex percpu_counters_lock irq_context: 0 zram_index_mutex mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_lock_key#3 irq_context: 0 zram_index_mutex &s->s_inode_list_lock irq_context: 0 zram_index_mutex &xa->xa_lock#10 irq_context: 0 zram_index_mutex lock irq_context: 0 zram_index_mutex lock &q->queue_lock irq_context: 0 zram_index_mutex lock &q->queue_lock &blkcg->lock irq_context: 0 zram_index_mutex &obj_hash[i].lock pool_lock irq_context: 0 zram_index_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 zram_index_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 zram_index_mutex &q->queue_lock irq_context: 0 zram_index_mutex &q->queue_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->queue_lock pcpu_lock irq_context: 0 zram_index_mutex &q->queue_lock &obj_hash[i].lock irq_context: 0 zram_index_mutex &q->queue_lock percpu_counters_lock irq_context: 0 zram_index_mutex &q->queue_lock &blkcg->lock irq_context: 0 zram_index_mutex &x->wait#9 irq_context: 0 zram_index_mutex &bdev->bd_size_lock irq_context: 0 zram_index_mutex &k->list_lock irq_context: 0 zram_index_mutex gdp_mutex irq_context: 0 zram_index_mutex gdp_mutex &k->list_lock irq_context: 0 zram_index_mutex lock kernfs_idr_lock irq_context: 0 zram_index_mutex &root->kernfs_rwsem irq_context: 0 zram_index_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 zram_index_mutex bus_type_sem irq_context: 0 zram_index_mutex sysfs_symlink_target_lock irq_context: 0 zram_index_mutex &c->lock irq_context: 0 zram_index_mutex &____s->seqcount irq_context: 0 zram_index_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 zram_index_mutex &root->kernfs_rwsem irq_context: 0 zram_index_mutex &dev->power.lock irq_context: 0 zram_index_mutex dpm_list_mtx irq_context: 0 zram_index_mutex req_lock irq_context: 0 zram_index_mutex &p->pi_lock irq_context: 0 zram_index_mutex &x->wait#11 irq_context: 0 zram_index_mutex &rq->__lock irq_context: 0 zram_index_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 zram_index_mutex subsys mutex#36 irq_context: 0 zram_index_mutex subsys mutex#36 &k->k_lock irq_context: 0 zram_index_mutex dev_hotplug_mutex irq_context: 0 zram_index_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock fs_reclaim irq_context: 0 zram_index_mutex &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &c->lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &____s->seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem 
irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 zram_index_mutex percpu_ref_switch_lock irq_context: 0 zram_index_mutex uevent_sock_mutex irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 zram_index_mutex running_helpers_waitq.lock irq_context: 0 zram_index_mutex subsys mutex#37 irq_context: 0 zram_index_mutex subsys mutex#37 &k->k_lock irq_context: 0 zram_index_mutex cgwb_lock irq_context: 0 zram_index_mutex pin_fs_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq 
irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 zram_index_mutex bdi_lock irq_context: 0 zram_index_mutex inode_hash_lock irq_context: 0 zram_index_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 zram_index_mutex (console_sem).lock irq_context: 0 zram_index_mutex console_lock console_srcu console_owner_lock irq_context: 0 zram_index_mutex console_lock console_srcu console_owner irq_context: 0 zram_index_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 zram_index_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 subsys mutex#38 irq_context: 0 subsys mutex#38 &k->k_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 configfs_dirent_lock irq_context: 0 &q->sysfs_lock &xa->xa_lock#11 pool_lock#2 irq_context: 0 &lock irq_context: 0 &lock nullb_indexes.xa_lock irq_context: 0 &q->sysfs_dir_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &disk->open_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 &disk->open_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock &base->lock irq_context: 0 &disk->open_mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock &ret->b_uptodate_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock pool_lock#2 irq_context: 0 nfc_index_ida.xa_lock irq_context: 0 nfc_devlist_mutex irq_context: 0 nfc_devlist_mutex fs_reclaim irq_context: 0 nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex &k->list_lock irq_context: 0 nfc_devlist_mutex gdp_mutex irq_context: 0 nfc_devlist_mutex gdp_mutex &k->list_lock irq_context: 0 nfc_devlist_mutex gdp_mutex fs_reclaim irq_context: 0 nfc_devlist_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex gdp_mutex &c->lock irq_context: 0 nfc_devlist_mutex gdp_mutex &____s->seqcount irq_context: 0 nfc_devlist_mutex gdp_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex gdp_mutex lock irq_context: 0 nfc_devlist_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 
nfc_devlist_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 nfc_devlist_mutex lock irq_context: 0 nfc_devlist_mutex lock kernfs_idr_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 nfc_devlist_mutex bus_type_sem irq_context: 0 nfc_devlist_mutex sysfs_symlink_target_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex &dev->power.lock irq_context: 0 nfc_devlist_mutex dpm_list_mtx irq_context: 0 nfc_devlist_mutex uevent_sock_mutex irq_context: 0 nfc_devlist_mutex &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex running_helpers_waitq.lock irq_context: 0 nfc_devlist_mutex subsys mutex#39 irq_context: 0 nfc_devlist_mutex subsys mutex#39 &k->k_lock irq_context: 0 llcp_devices_lock irq_context: 0 &dev->mutex rfkill_global_mutex irq_context: 0 &dev->mutex rfkill_global_mutex fs_reclaim irq_context: 0 &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rfkill_global_mutex pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex &k->list_lock irq_context: 0 &dev->mutex rfkill_global_mutex lock irq_context: 0 &dev->mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex bus_type_sem irq_context: 0 &dev->mutex rfkill_global_mutex &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex &____s->seqcount irq_context: 0 &dev->mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex &dev->power.lock irq_context: 0 &dev->mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 &dev->mutex rfkill_global_mutex &rfkill->lock irq_context: 0 &dev->mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 &dev->mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex rfkill_global_mutex &k->k_lock irq_context: 0 &dev->mutex rfkill_global_mutex subsys 
mutex#40 irq_context: 0 &dev->mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 &dev->mutex rfkill_global_mutex triggers_list_lock irq_context: 0 &dev->mutex rfkill_global_mutex leds_list_lock irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rfkill->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &____s->seqcount irq_context: 0 &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rq->__lock irq_context: 0 dma_heap_minors.xa_lock irq_context: 0 subsys mutex#41 irq_context: 0 subsys mutex#41 &k->k_lock irq_context: 0 heap_list_lock irq_context: 0 dma_heap_minors.xa_lock pool_lock#2 irq_context: 0 misc_mtx lock kernfs_idr_lock &____s->seqcount irq_context: 0 cpu_hotplug_lock wq_pool_mutex 
&wq->mutex &pool->lock &p->pi_lock irq_context: 0 major_names_lock &c->lock irq_context: 0 major_names_lock &____s->seqcount irq_context: 0 subsys mutex#42 irq_context: 0 subsys mutex#42 &k->list_lock irq_context: 0 subsys mutex#42 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &rq->__lock irq_context: 0 nvmf_hosts_mutex irq_context: 0 subsys mutex#43 irq_context: 0 subsys mutex#43 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex semaphore->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex iommu_probe_device_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 nvmf_transports_rwsem irq_context: 0 subsys mutex#44 irq_context: 0 subsys mutex#44 &k->k_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &____s->seqcount irq_context: 0 nvmet_config_sem irq_context: 0 subsys mutex#45 irq_context: 0 subsys mutex#45 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock semaphore->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock 
&____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex irq_domain_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &domain->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &drv->dynids.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cpu_add_remove_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_instance_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex chrdevs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex req_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#11 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#46 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#46 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx pm_qos_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex resource_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock pools_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 
&default_group_class[depth - 1]#4/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 
&default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 
1]#6/2 &default_group_class[depth - 1]#7 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7/2 irq_context: 0 backend_mutex irq_context: 0 scsi_mib_index_lock irq_context: 0 hba_lock irq_context: 0 device_mutex irq_context: 0 device_mutex fs_reclaim irq_context: 0 device_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 device_mutex pool_lock#2 irq_context: 0 console_owner_lock irq_context: 0 console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &xa->xa_lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &hba->device_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex hrtimer_bases.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock init_fs.seq.seqcount irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 part_parser_lock irq_context: 0 mtd_table_mutex irq_context: 0 chip_drvs_lock irq_context: 0 
(kmod_concurrent_max).lock irq_context: 0 &x->wait#16 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &sig->wait_chldexit irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &prev->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &(&sig->stats_lock)->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) css_set_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock input_pool.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock irq_context: 0 mtd_table_mutex fs_reclaim irq_context: 0 mtd_table_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex pool_lock#2 irq_context: 0 mtd_table_mutex &x->wait#9 irq_context: 0 mtd_table_mutex &obj_hash[i].lock irq_context: 0 mtd_table_mutex &k->list_lock irq_context: 0 mtd_table_mutex gdp_mutex irq_context: 0 mtd_table_mutex gdp_mutex &k->list_lock irq_context: 0 mtd_table_mutex gdp_mutex fs_reclaim irq_context: 0 mtd_table_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex gdp_mutex pool_lock#2 irq_context: 0 mtd_table_mutex gdp_mutex lock irq_context: 0 mtd_table_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 mtd_table_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 mtd_table_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex lock irq_context: 0 mtd_table_mutex lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex bus_type_sem irq_context: 0 
mtd_table_mutex sysfs_symlink_target_lock irq_context: 0 mtd_table_mutex &c->lock irq_context: 0 mtd_table_mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 mtd_table_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock pool_lock#2 irq_context: 0 mtd_table_mutex dpm_list_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock &____s->seqcount irq_context: 0 mtd_table_mutex req_lock irq_context: 0 mtd_table_mutex &p->pi_lock irq_context: 0 mtd_table_mutex &p->pi_lock &rq->__lock irq_context: 0 mtd_table_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_subdir_lock irq_context: 0 mtd_table_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex blk_queue_ida.xa_lock irq_context: 0 mtd_table_mutex &x->wait#11 irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock irq_context: 0 mtd_table_mutex uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock pool_lock#2 irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock cpu_hotplug_lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock fs_reclaim irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &set->tag_list_lock irq_context: 0 mtd_table_mutex subsys mutex#47 irq_context: 0 mtd_table_mutex subsys mutex#47 &k->k_lock irq_context: 0 mtd_table_mutex devtree_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->lock &ctrl->state_wq irq_context: 0 mtd_table_mutex nvmem_ida.xa_lock irq_context: 0 mtd_table_mutex nvmem_cell_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &hctx->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#17 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &base->lock &obj_hash[i].lock irq_context: softirq &x->wait#17 irq_context: softirq &x->wait#17 &p->pi_lock irq_context: softirq 
&x->wait#17 &p->pi_lock &rq->__lock irq_context: softirq &x->wait#17 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex &dev->mutex &dev->power.lock irq_context: 0 mtd_table_mutex &dev->mutex &k->list_lock irq_context: 0 mtd_table_mutex &dev->mutex &k->k_lock irq_context: 0 mtd_table_mutex subsys mutex#48 irq_context: 0 mtd_table_mutex nvmem_mutex irq_context: 0 mtd_table_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (&timer.timer) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &____s->seqcount irq_context: 0 mtd_table_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock pool_lock#2 irq_context: 0 mtd_table_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &k->list_lock irq_context: 0 mtd_table_mutex lock kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock bus_type_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock dpm_list_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock subsys mutex#49 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock subsys mutex#49 &k->k_lock irq_context: 0 mtd_table_mutex (console_sem).lock irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner_lock irq_context: 0 mtd_table_mutex console_lock 
console_srcu console_owner irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &xa->xa_lock#12 irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 mtd_table_mutex pcpu_alloc_mutex irq_context: 0 mtd_table_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex irq_context: 0 mtd_table_mutex batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class irq_context: 0 mtd_table_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class irq_resend_lock irq_context: 0 mtd_table_mutex blk_queue_ida.xa_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 mtd_table_mutex &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex proc_subdir_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_lock cpu_hotplug_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &ent->pde_unload_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex proc_inum_ida.xa_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex pool_lock#2 irq_context: 0 mtd_table_mutex &obj_hash[i].lock pool_lock irq_context: 0 mtd_table_mutex &set->tag_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock pool_lock#2 irq_context: 0 mtd_table_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 mtd_table_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex pci_lock irq_context: 0 mtd_table_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 mtd_table_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 
mtd_table_mutex bio_slab_lock irq_context: 0 mtd_table_mutex percpu_counters_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex irq_context: 0 mtd_table_mutex &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 mtd_table_mutex &xa->xa_lock#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex pool_lock#2 irq_context: 0 mtd_table_mutex lock &q->queue_lock irq_context: 0 mtd_table_mutex lock &q->queue_lock &blkcg->lock irq_context: 0 mtd_table_mutex &pcp->lock &zone->lock irq_context: 0 mtd_table_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->mq_freeze_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex set->srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock pool_lock#2 irq_context: 0 mtd_table_mutex percpu_ref_switch_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->queue_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock &its->lock irq_context: 0 mtd_table_mutex &q->queue_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->queue_lock pcpu_lock irq_context: 0 mtd_table_mutex &q->queue_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->queue_lock percpu_counters_lock irq_context: 0 mtd_table_mutex &q->queue_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->queue_lock fill_pool_map-wait-type-override 
pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock proc_subdir_lock irq_context: 0 mtd_table_mutex &q->queue_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &ent->pde_unload_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->queue_lock &blkcg->lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex &bdev->bd_size_lock irq_context: 0 mtd_table_mutex elv_list_lock irq_context: 0 mtd_table_mutex (work_completion)(&(&q->requeue_work)->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex (work_completion)(&(&hctx->run_work)->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 mtd_table_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 mtd_table_mutex &q->debugfs_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem pool_lock#2 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &xa->xa_lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex purge_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &xa->xa_lock#6 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 mtd_table_mutex subsys mutex#36 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 mtd_table_mutex subsys mutex#36 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex dev_hotplug_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock 
pool_lock#2 irq_context: 0 mtd_table_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock 
irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) 
&dev->mutex &dev->shutdown_lock register_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &cma->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 (work_completion)(work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 rcu_read_lock (wq_completion)mm_percpu_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 &x->wait#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#4 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#5 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&barr->work) &rq->__lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &zone->lock &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex 
&q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex set->srcu irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 mtd_table_mutex subsys mutex#37 irq_context: 0 mtd_table_mutex subsys mutex#37 &k->k_lock irq_context: 0 mtd_table_mutex cgwb_lock irq_context: 0 mtd_table_mutex bdi_lock irq_context: 0 mtd_table_mutex inode_hash_lock irq_context: 0 mtd_table_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: softirq 
&x->wait#17 &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &x->wait#17 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem krc.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem krc.lock &base->lock irq_context: 0 pernet_ops_rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex stack_depot_init_mutex irq_context: 0 rtnl_mutex pcpu_alloc_mutex irq_context: 0 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex kthread_create_lock irq_context: 0 rtnl_mutex &p->pi_lock irq_context: 0 rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &x->wait irq_context: 0 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex wq_pool_mutex irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex irq_context: 0 rtnl_mutex crngs.lock irq_context: 0 
rtnl_mutex &xa->xa_lock#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->namespaces_rwsem irq_context: 0 rtnl_mutex net_rwsem irq_context: 0 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex &x->wait#9 irq_context: 0 rtnl_mutex &k->list_lock irq_context: 0 rtnl_mutex gdp_mutex irq_context: 0 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 rtnl_mutex lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex bus_type_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (work_completion)(&ctrl->scan_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock (wq_completion)nvme-wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#10 irq_context: 0 rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &dev->power.lock irq_context: 0 rtnl_mutex dpm_list_mtx irq_context: 0 rtnl_mutex uevent_sock_mutex irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex running_helpers_waitq.lock irq_context: 0 rtnl_mutex subsys mutex#20 irq_context: 0 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 rtnl_mutex &dir->lock#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->async_event_work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->async_event_work) &nvmeq->sq_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &hctx->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)nvme-wq 
(work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#17 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &base->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock (&timer.timer) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_hotplug_mutex irq_context: 0 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 rtnl_mutex dev_base_lock irq_context: 0 rtnl_mutex input_pool.lock irq_context: 0 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex batched_entropy_u32.lock irq_context: 0 rtnl_mutex &tbl->lock irq_context: 0 rtnl_mutex sysctl_lock irq_context: 0 rtnl_mutex nl_table_lock irq_context: 0 rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &ctrl->namespaces_rwsem irq_context: 0 rtnl_mutex rcu_read_lock &bond->stats_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock blk_queue_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock irq_context: 0 rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock cpu_hotplug_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &xa->xa_lock#11 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &set->tag_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bio_slab_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock percpu_counters_lock irq_context: 0 (wq_completion)events (linkwatch_work).work irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &s->s_inode_list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &xa->xa_lock#10 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock &q->queue_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock &q->queue_lock &blkcg->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex.wait_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex dev_base_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq 
(work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock percpu_counters_lock irq_context: 0 once_lock irq_context: 0 once_lock crngs.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock &blkcg->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#9 irq_context: 0 (wq_completion)events (work_completion)(&w->work) irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock nvme_subsystems_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&w->work) pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock &xa->xa_lock#12 irq_context: 0 (inet6addr_validator_chain).rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock &obj_hash[i].lock irq_context: 0 (inetaddr_validator_chain).rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex crngs.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &bdev->bd_size_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &ctrl->namespaces_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock ext_devt_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bus_type_sem 
irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dpm_list_mtx irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock req_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) kfence_freelist_lock irq_context: 0 (wq_completion)gid-cache-wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) pool_lock#2 irq_context: 0 &meta->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#36 
irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#36 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dev_hotplug_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock 
&q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock gdp_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock gdp_mutex &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock uevent_sock_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock running_helpers_waitq.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#37 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#37 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock cgwb_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 rcu_read_lock 
rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bdi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock inode_hash_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bdev_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex free_vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex init_mm.page_table_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#4 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &mapping->i_private_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &base->lock irq_context: 
0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &folio_wait_table[i] irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &ret->b_uptodate_lock irq_context: hardirq &folio_wait_table[i] irq_context: hardirq &folio_wait_table[i] &p->pi_lock irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#9 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex (console_sem).lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) 
fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 subsys mutex#50 irq_context: 0 subsys mutex#50 &k->k_lock irq_context: 0 gpio_lookup_lock irq_context: 0 mdio_board_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &bdev->bd_size_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &x->wait#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex ext_devt_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex bus_type_sem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex dpm_list_mtx irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex req_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &x->wait#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex subsys mutex#36 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex subsys mutex#36 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex 
&xa->xa_lock#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#10 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex inode_hash_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex purge_vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#5 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &lruvec->lru_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock nvme_ns_chr_minor_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock chrdevs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fs_reclaim &rq->__lock irq_context: 0 mode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#51 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#51 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &dentry->d_lock irq_context: 0 misc_mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 l3mdev_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock quarantine_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex fs_reclaim &rq->__lock irq_context: 0 &dev->mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 &dev->mutex fs_reclaim &obj_hash[i].lock irq_context: 0 &dev->mutex fs_reclaim &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex fs_reclaim pool_lock#2 irq_context: 0 &dev->mutex init_mm.page_table_lock irq_context: 0 &dev->mutex stack_depot_init_mutex irq_context: 0 &dev->mutex pcpu_alloc_mutex irq_context: 0 &dev->mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex cpu_hotplug_lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex wq_pool_mutex irq_context: 0 &dev->mutex wq_pool_mutex &wq->mutex irq_context: 0 &dev->mutex pools_reg_lock irq_context: 0 &dev->mutex pools_reg_lock pools_lock irq_context: 0 &dev->mutex pools_reg_lock fs_reclaim irq_context: 0 &dev->mutex pools_reg_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex pools_reg_lock pool_lock#2 irq_context: 0 &dev->mutex pools_reg_lock lock irq_context: 0 &dev->mutex pools_reg_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex pools_reg_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex pools_reg_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &retval->lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock tmpmask_lock irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock &its->lock irq_context: 0 &dev->mutex &desc->request_mutex 
&irq_desc_lock_class mask_lock tmp_mask_lock &its->lock irq_context: 0 &dev->mutex rtnl_mutex irq_context: 0 &dev->mutex rtnl_mutex &c->lock irq_context: 0 &dev->mutex rtnl_mutex &____s->seqcount irq_context: 0 &dev->mutex rtnl_mutex pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex &xa->xa_lock#4 irq_context: 0 &dev->mutex rtnl_mutex net_rwsem irq_context: 0 &dev->mutex rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 &dev->mutex rtnl_mutex &x->wait#9 irq_context: 0 &dev->mutex rtnl_mutex &k->list_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 &dev->mutex rtnl_mutex lock irq_context: 0 &dev->mutex rtnl_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rtnl_mutex bus_type_sem irq_context: 0 &dev->mutex rtnl_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex &dev->power.lock irq_context: 0 &dev->mutex rtnl_mutex dpm_list_mtx irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex rtnl_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex rtnl_mutex &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#20 irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex &dir->lock#2 irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rtnl_mutex dev_hotplug_mutex irq_context: 0 &dev->mutex rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 
0 &dev->mutex rtnl_mutex dev_base_lock irq_context: 0 &dev->mutex rtnl_mutex input_pool.lock irq_context: 0 &dev->mutex rtnl_mutex pcpu_alloc_mutex irq_context: 0 &dev->mutex rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->mutex rtnl_mutex batched_entropy_u32.lock irq_context: 0 &dev->mutex rtnl_mutex &tbl->lock irq_context: 0 &dev->mutex rtnl_mutex sysctl_lock irq_context: 0 &dev->mutex rtnl_mutex nl_table_lock irq_context: 0 &dev->mutex rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)gve irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) irq_context: 0 hnae3_common_lock irq_context: 0 subsys mutex#52 irq_context: 0 subsys mutex#52 &k->k_lock irq_context: 0 compressor_list_lock irq_context: 0 compressor_list_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 hwsim_radio_lock irq_context: 0 subsys mutex#53 irq_context: 0 subsys mutex#53 &k->k_lock irq_context: 0 deferred_probe_mutex irq_context: 0 rtnl_mutex param_lock irq_context: 0 rtnl_mutex param_lock rate_ctrl_mutex irq_context: 0 rtnl_mutex (console_sem).lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex kobj_ns_type_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx bus_type_sem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex 
&rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx running_helpers_waitq.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 rtnl_mutex &base->lock irq_context: 0 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 rfkill_global_mutex irq_context: 0 rfkill_global_mutex fs_reclaim irq_context: 0 rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rfkill_global_mutex pool_lock#2 irq_context: 0 rfkill_global_mutex &k->list_lock irq_context: 0 rfkill_global_mutex lock irq_context: 0 rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rfkill_global_mutex bus_type_sem irq_context: 0 rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 rfkill_global_mutex &c->lock irq_context: 0 rfkill_global_mutex &____s->seqcount irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 rfkill_global_mutex &dev->power.lock irq_context: 0 rfkill_global_mutex dpm_list_mtx irq_context: 0 rfkill_global_mutex &rfkill->lock irq_context: 0 rfkill_global_mutex uevent_sock_mutex irq_context: 0 rfkill_global_mutex &obj_hash[i].lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rfkill_global_mutex running_helpers_waitq.lock irq_context: 0 rfkill_global_mutex &k->k_lock irq_context: 0 rfkill_global_mutex subsys mutex#40 irq_context: 0 rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 rfkill_global_mutex triggers_list_lock irq_context: 0 rfkill_global_mutex leds_list_lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx stack_depot_init_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback put_task_map-wait-type-override &meta->lock irq_context: softirq rcu_callback put_task_map-wait-type-override kfence_freelist_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#4 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx input_pool.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rfkill_global_mutex &rq->__lock irq_context: 0 rfkill_global_mutex.wait_lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &dev->mutex crngs.lock irq_context: 0 &dev->mutex 
rtnl_mutex subsys mutex#55 irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#55 &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex stack_depot_init_mutex irq_context: 0 &dev->mutex rtnl_mutex crngs.lock irq_context: 0 &dev->mutex rtnl_mutex &sdata->sec_mtx irq_context: 0 &dev->mutex rtnl_mutex &sdata->sec_mtx &sec->lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &c->lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &____s->seqcount irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 irq_context: 0 &dev->mutex hwsim_phys_lock irq_context: 0 &dev->mutex nl_table_lock irq_context: 0 &dev->mutex nl_table_wait.lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex rtnl_mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex hwsim_phys_lock fs_reclaim irq_context: 0 &dev->mutex hwsim_phys_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex hwsim_phys_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_node_0 irq_context: 0 xdomain_lock irq_context: 0 xdomain_lock fs_reclaim irq_context: 0 xdomain_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 xdomain_lock pool_lock#2 irq_context: 0 ioctl_mutex irq_context: 0 address_handler_list_lock irq_context: 0 card_mutex irq_context: 0 subsys mutex#56 irq_context: 0 subsys mutex#56 &k->k_lock irq_context: 0 &x->wait#18 irq_context: 0 &x->wait#18 &p->pi_lock irq_context: 0 &x->wait#18 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#18 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &txlock irq_context: 0 &txlock &list->lock#3 irq_context: 0 &txlock &txwq irq_context: 0 &iocq[i].lock irq_context: 0 &iocq[i].lock &ktiowq[i] irq_context: 0 rcu_read_lock &c->lock irq_context: 0 rcu_read_lock &____s->seqcount irq_context: 0 &txwq irq_context: 0 &txwq &p->pi_lock irq_context: 0 &txwq &p->pi_lock &rq->__lock irq_context: 0 &txwq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh pool_lock#2 irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 subsys mutex#57 irq_context: 0 subsys mutex#57 &k->k_lock irq_context: 0 usb_bus_idr_lock irq_context: 0 usb_bus_idr_lock (usb_notifier_list).rwsem irq_context: 0 table_lock irq_context: 0 table_lock &k->list_lock irq_context: 0 table_lock fs_reclaim irq_context: 0 table_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 table_lock pool_lock#2 irq_context: 0 table_lock lock irq_context: 0 table_lock lock kernfs_idr_lock irq_context: 0 table_lock &root->kernfs_rwsem irq_context: 0 table_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 table_lock &k->k_lock irq_context: 0 table_lock uevent_sock_mutex irq_context: 0 table_lock &obj_hash[i].lock irq_context: 0 table_lock rcu_read_lock &pool->lock irq_context: 0 table_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 table_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 table_lock running_helpers_waitq.lock irq_context: 0 table_lock &rq->__lock irq_context: 0 table_lock (console_sem).lock irq_context: 0 table_lock console_lock console_srcu console_owner_lock irq_context: 0 table_lock console_lock console_srcu console_owner irq_context: 0 table_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 table_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 table_lock &____s->seqcount irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &base->lock &obj_hash[i].lock irq_context: 0 table_lock &c->lock irq_context: 0 table_lock lock kernfs_idr_lock pool_lock#2 irq_context: softirq (&ipmi_timer) irq_context: softirq (&ipmi_timer) &ipmi_interfaces_srcu irq_context: 0 table_lock &obj_hash[i].lock pool_lock irq_context: 0 table_lock &pcp->lock &zone->lock irq_context: 0 table_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 table_lock batched_entropy_u8.lock irq_context: 0 table_lock kfence_freelist_lock irq_context: 0 table_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mutex devtree_lock irq_context: 0 &dev->mutex usb_bus_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem pin_fs_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start 
irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &x->wait#9 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &k->list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &k->list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem bus_type_sem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &____s->seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &dev->power.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem dpm_list_mtx irq_context: 0 &dev->mutex (usb_notifier_list).rwsem req_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &x->wait#11 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem uevent_sock_mutex irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem running_helpers_waitq.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &k->k_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem subsys mutex#57 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem subsys mutex#57 &k->k_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem mon_lock irq_context: 0 &dev->mutex usb_port_peer_mutex irq_context: 0 
&dev->mutex device_state_lock irq_context: 0 &dev->mutex usb_bus_idr_lock mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock irq_context: softirq &bh->lock irq_context: softirq lock#6 irq_context: softirq lock#6 kcov_remote_lock irq_context: softirq &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock (console_sem).lock irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock input_pool.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock req_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#11 irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock 
kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock uevent_sock_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_links_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex set_config_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex devtree_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex 
usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_state_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &new_driver->dynids.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex device_links_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock 
kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex (console_sem).lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dum_hcd->dum->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex 
&hub->status_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx pm_qos_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex component_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx fs_reclaim 
irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex subsys mutex#58 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex (usb_notifier_list).rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex usb_bus_idr_lock subsys mutex#58 irq_context: 0 (wq_completion)events_power_efficient 
(work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#9 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#19 &p->pi_lock irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex hcd_root_hub_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)events_power_efficient 
(work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) lock#6 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &hub->irq_urb_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (&hub->irq_urb_retry) irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_urb_unlink_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &rq->__lock irq_context: softirq usb_kill_urb_queue.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (work_completion)(&hub->tt.clear_work) irq_context: 0 &dev->mutex udc_lock irq_context: 0 &dev->mutex subsys mutex#59 irq_context: 0 &dev->mutex subsys mutex#59 &k->k_lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex gadget_id_numbers.xa_lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) irq_context: 0 
(wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events kernfs_notify_work irq_context: 0 (wq_completion)events kernfs_notify_work kernfs_notify_lock irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem irq_context: 0 &dev->mutex subsys mutex#60 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dum_hcd->dum->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) device_state_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_urb_list_lock irq_context: 0 func_lock irq_context: 0 g_tf_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &vhci_hcd->vhci->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &____s->seqcount irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) usb_kill_urb_queue.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock &rq->__lock irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &vhci_hcd->vhci->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &lock->wait_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) 
mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) fs_reclaim irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &x->wait#19 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (&timer.timer) irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &____s->seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &pcp->lock &zone->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &lock->wait_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock batched_entropy_u8.lock irq_context: 0 &dev->mutex usb_bus_idr_lock kfence_freelist_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#19 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex 
&base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 reading_mutex &x->wait#14 irq_context: 0 reading_mutex &rq->__lock irq_context: 0 reading_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 reading_mutex rcu_read_lock &rq->__lock irq_context: 0 reading_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &x->wait#14 &p->pi_lock irq_context: hardirq &x->wait#14 &p->pi_lock &rq->__lock irq_context: hardirq &x->wait#14 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem batched_entropy_u8.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem kfence_freelist_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 &c->lock irq_context: 0 &type->i_mutex_dir_key#2 &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq &x->wait#4 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &c->lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock crngs.lock base_crng.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &meta->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex kfence_freelist_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &meta->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex &dev->mutex &lock->wait_lock irq_context: 0 &dev->mutex &dev->mutex &rq->__lock irq_context: 0 &dev->mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)pm 
(work_completion)(&dev->power.work) &base->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 input_ida.xa_lock irq_context: 0 input_ida.xa_lock pool_lock#2 irq_context: 0 subsys mutex#31 irq_context: 0 subsys mutex#31 &k->k_lock irq_context: 0 input_mutex input_ida.xa_lock irq_context: 0 input_mutex fs_reclaim irq_context: 0 input_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 input_mutex pool_lock#2 irq_context: 0 input_mutex &x->wait#9 irq_context: 0 input_mutex &obj_hash[i].lock irq_context: 0 input_mutex &dev->mutex#2 irq_context: 0 input_mutex chrdevs_lock irq_context: 0 input_mutex &k->list_lock irq_context: 0 input_mutex lock irq_context: 0 input_mutex lock kernfs_idr_lock irq_context: 0 input_mutex &root->kernfs_rwsem irq_context: 0 input_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 input_mutex bus_type_sem irq_context: 0 input_mutex sysfs_symlink_target_lock irq_context: 0 input_mutex &root->kernfs_rwsem irq_context: 0 input_mutex &dev->power.lock irq_context: 0 input_mutex dpm_list_mtx irq_context: 0 input_mutex req_lock irq_context: 0 input_mutex &p->pi_lock irq_context: 0 input_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 input_mutex &x->wait#11 irq_context: 0 input_mutex &rq->__lock irq_context: 0 input_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 input_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 input_mutex &c->lock irq_context: 0 input_mutex &____s->seqcount irq_context: 0 input_mutex uevent_sock_mutex irq_context: 0 input_mutex rcu_read_lock &pool->lock irq_context: 0 input_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 input_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 input_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 input_mutex running_helpers_waitq.lock irq_context: 0 input_mutex &k->k_lock irq_context: 0 input_mutex subsys mutex#31 irq_context: 0 input_mutex subsys mutex#31 &k->k_lock irq_context: 0 input_mutex &p->pi_lock &rq->__lock irq_context: 0 input_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 input_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 serio_event_lock irq_context: 0 serio_event_lock pool_lock#2 irq_context: 0 serio_event_lock rcu_read_lock &pool->lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long irq_context: 0 (wq_completion)events_long serio_event_work irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex serio_event_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &k->list_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &k->k_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex pool_lock#2 irq_context: 0 &new_driver->dynids.lock irq_context: 0 misc_mtx batched_entropy_u8.lock irq_context: 0 misc_mtx kfence_freelist_lock 
irq_context: 0 &dev->mutex (efi_runtime_lock).lock irq_context: 0 &dev->mutex &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex rtc_ida.xa_lock irq_context: 0 &dev->mutex &rtc->ops_lock irq_context: 0 &dev->mutex &rtc->ops_lock (efi_runtime_lock).lock irq_context: 0 &dev->mutex &rtc->ops_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &rtc->ops_lock &x->wait#12 irq_context: 0 &dev->mutex &rtc->ops_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &rtc->ops_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex chrdevs_lock irq_context: 0 &dev->mutex req_lock irq_context: 0 &dev->mutex &x->wait#11 irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex subsys mutex#27 irq_context: 0 &dev->mutex subsys mutex#27 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 &c->lock irq_context: 0 &dev->mutex subsys mutex#27 &____s->seqcount irq_context: 0 &dev->mutex subsys mutex#27 pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &x->wait#9 irq_context: 0 &dev->mutex subsys mutex#27 &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 platform_devid_ida.xa_lock irq_context: 0 &dev->mutex subsys mutex#27 &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 lock irq_context: 0 &dev->mutex subsys mutex#27 lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex subsys mutex#27 bus_type_sem irq_context: 0 &dev->mutex subsys mutex#27 sysfs_symlink_target_lock irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->power.lock irq_context: 0 &dev->mutex subsys mutex#27 dpm_list_mtx irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 &dev->mutex subsys mutex#27 uevent_sock_mutex irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex 
subsys mutex#27 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex subsys mutex#27 running_helpers_waitq.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex device_links_srcu irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex device_links_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &c->lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#4 irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex subsys mutex#27 wakeup_ida.xa_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys 
mutex#27 gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#13 irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 events_lock irq_context: 0 &dev->mutex subsys mutex#27 rtcdev_lock irq_context: 0 g_smscore_deviceslock irq_context: 0 g_smscore_deviceslock fs_reclaim irq_context: 0 g_smscore_deviceslock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 g_smscore_deviceslock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock pool_lock#2 irq_context: 0 cx231xx_devlist_mutex irq_context: 0 em28xx_devlist_mutex irq_context: 0 pvr2_context_sync_data.lock irq_context: 0 &dev->mutex core_lock irq_context: 0 &dev->mutex core_lock fs_reclaim irq_context: 0 &dev->mutex core_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex core_lock &c->lock irq_context: 0 &dev->mutex core_lock &____s->seqcount irq_context: 0 &dev->mutex core_lock pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem i2c_dev_list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &x->wait#9 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem chrdevs_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &k->list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &k->list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex fs_reclaim irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem bus_type_sem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &dev->power.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem dpm_list_mtx irq_context: 0 &dev->mutex 
&(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &____s->seqcount irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem req_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &x->wait#11 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem uevent_sock_mutex irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem running_helpers_waitq.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &k->k_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem subsys mutex#61 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem subsys mutex#61 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#62 irq_context: 0 &dev->mutex pin_fs_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 &dev->mutex core_lock &k->list_lock irq_context: 0 &dev->mutex core_lock &k->k_lock irq_context: 0 &dev->mutex dvbdev_register_lock irq_context: 0 &dev->mutex dvbdev_register_lock (console_sem).lock irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex (kmod_concurrent_max).lock irq_context: 0 &dev->mutex &x->wait#16 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 &dev->mutex &dev->mutex device_links_srcu irq_context: 0 &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex &dev->mutex device_links_lock 
irq_context: 0 &dev->mutex &dev->mutex fs_reclaim irq_context: 0 &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex &dev->mutex &c->lock irq_context: 0 &dev->mutex &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex &dev->mutex lock irq_context: 0 &dev->mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex frontend_mutex irq_context: 0 &dev->mutex frontend_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex (console_sem).lock irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock minor_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &xa->xa_lock#13 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock (console_sem).lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex frontend_mutex 
dvbdev_register_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &x->wait#9 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &obj_hash[i].lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &k->list_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock bus_type_sem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &c->lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &____s->seqcount irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &dev->power.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock dpm_list_mtx irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock req_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &p->pi_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &x->wait#11 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &rq->__lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock uevent_sock_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &k->k_lock irq_context: 0 &dev->mutex frontend_mutex 
dvbdev_register_lock subsys mutex#63 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock subsys mutex#63 &k->k_lock irq_context: 0 &dev->mutex &dmxdev->lock irq_context: 0 &dev->mutex dvbdev_register_lock fs_reclaim irq_context: 0 &dev->mutex dvbdev_register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex dvbdev_register_lock pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock minor_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &____s->seqcount irq_context: 0 &dev->mutex dvbdev_register_lock &xa->xa_lock#13 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex irq_context: 0 &dev->mutex dvbdev_register_lock &xa->xa_lock#13 pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock &c->lock irq_context: 0 &dev->mutex dvbdev_register_lock &x->wait#9 irq_context: 0 &dev->mutex dvbdev_register_lock &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock &k->list_lock irq_context: 0 &dev->mutex dvbdev_register_lock gdp_mutex irq_context: 0 &dev->mutex dvbdev_register_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex dvbdev_register_lock lock irq_context: 0 &dev->mutex dvbdev_register_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock bus_type_sem irq_context: 0 &dev->mutex dvbdev_register_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &dev->power.lock irq_context: 0 &dev->mutex dvbdev_register_lock dpm_list_mtx irq_context: 0 &dev->mutex dvbdev_register_lock req_lock irq_context: 0 &dev->mutex dvbdev_register_lock &p->pi_lock irq_context: 0 &dev->mutex dvbdev_register_lock &x->wait#11 irq_context: 0 &dev->mutex dvbdev_register_lock &rq->__lock irq_context: 0 &dev->mutex dvbdev_register_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dvbdev_register_lock uevent_sock_mutex irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex dvbdev_register_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex dvbdev_register_lock &k->k_lock irq_context: 0 &dev->mutex dvbdev_register_lock subsys mutex#63 irq_context: 0 &dev->mutex dvbdev_register_lock subsys mutex#63 &k->k_lock irq_context: 0 &dev->mutex dvbdev_register_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex dvbdev_register_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex &dvbdemux->mutex irq_context: 0 &dev->mutex media_devnode_lock irq_context: 0 &dev->mutex subsys mutex#64 irq_context: 0 &dev->mutex videodev_lock irq_context: 0 &dev->mutex subsys mutex#65 irq_context: 0 &dev->mutex subsys mutex#65 &k->k_lock irq_context: 0 &dev->mutex &xa->xa_lock#13 irq_context: 0 
&dev->mutex &mdev->graph_mutex irq_context: 0 &dev->mutex &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex vimc_sensor:396:(&vsensor->hdl)->_lock irq_context: 0 &dev->mutex &v4l2_dev->lock irq_context: 0 &dev->mutex vimc_debayer:581:(&vdebayer->hdl)->_lock irq_context: 0 &dev->mutex vimc_lens:61:(&vlens->hdl)->_lock irq_context: 0 &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex tk_core.seq.seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1618:(hdl_fb)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock fs_reclaim irq_context: 0 
&dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &zone->lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &obj_hash[i].lock irq_context: 0 &adap->kthread_waitq irq_context: 0 &dev->cec_xfers_slock irq_context: 0 &dev->kthread_waitq_cec irq_context: 0 &dev->mutex cec_devnode_lock irq_context: 0 &dev->mutex subsys mutex#66 irq_context: 0 &dev->mutex 
&adap->lock irq_context: 0 &dev->mutex &adap->lock tk_core.seq.seqcount irq_context: 0 &dev->mutex &adap->lock &adap->devnode.lock_fhs irq_context: 0 &dev->mutex &mdev->graph_mutex &c->lock irq_context: 0 &dev->mutex &mdev->graph_mutex &____s->seqcount irq_context: 0 &dev->mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 ptp_clocks_map.xa_lock irq_context: 0 subsys mutex#67 irq_context: 0 subsys mutex#67 &k->k_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 pers_lock irq_context: 0 _lock irq_context: 0 dm_bufio_clients_lock irq_context: 0 _ps_lock irq_context: 0 _lock#2 irq_context: 0 _lock#3 irq_context: 0 register_lock#2 irq_context: 0 subsys mutex#68 irq_context: 0 subsys mutex#68 &k->k_lock irq_context: 0 bp_lock irq_context: 0 bp_lock irq_context: 0 subsys mutex#69 irq_context: 0 subsys mutex#69 &k->k_lock irq_context: 0 free_vmap_area_lock &obj_hash[i].lock irq_context: 0 free_vmap_area_lock pool_lock#2 irq_context: softirq (&dsp_spl_tl) irq_context: softirq (&dsp_spl_tl) dsp_lock irq_context: softirq (&dsp_spl_tl) dsp_lock iclock_lock irq_context: softirq (&dsp_spl_tl) dsp_lock iclock_lock tk_core.seq.seqcount irq_context: softirq (&dsp_spl_tl) dsp_lock &obj_hash[i].lock irq_context: softirq (&dsp_spl_tl) dsp_lock &base->lock irq_context: softirq (&dsp_spl_tl) dsp_lock &base->lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex lock#7 irq_context: 0 iscsi_transport_lock irq_context: 0 subsys mutex#70 irq_context: 0 subsys mutex#70 &k->k_lock irq_context: 0 &tx_task->waiting irq_context: 0 link_ops_rwsem irq_context: 0 disable_lock irq_context: 0 disable_lock fs_reclaim irq_context: 0 disable_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 disable_lock pool_lock#2 irq_context: 0 disable_lock &x->wait#9 irq_context: 0 disable_lock &obj_hash[i].lock irq_context: 0 disable_lock &k->list_lock irq_context: 0 disable_lock lock irq_context: 0 disable_lock lock kernfs_idr_lock irq_context: 0 disable_lock &root->kernfs_rwsem irq_context: 0 disable_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 disable_lock bus_type_sem irq_context: 0 disable_lock sysfs_symlink_target_lock irq_context: 0 disable_lock &k->k_lock irq_context: 0 disable_lock &root->kernfs_rwsem irq_context: 0 disable_lock &dev->power.lock irq_context: 0 disable_lock dpm_list_mtx irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 disable_lock &c->lock irq_context: 0 disable_lock &____s->seqcount irq_context: 0 disable_lock uevent_sock_mutex irq_context: 0 disable_lock rcu_read_lock &pool->lock irq_context: 0 disable_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 disable_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 disable_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 disable_lock running_helpers_waitq.lock irq_context: 0 disable_lock &dev->mutex &dev->power.lock 
irq_context: 0 disable_lock &dev->mutex &k->list_lock irq_context: 0 disable_lock &dev->mutex &k->k_lock irq_context: 0 disable_lock subsys mutex#4 irq_context: 0 protocol_lock irq_context: 0 protocol_lock pool_lock#2 irq_context: 0 psinfo_lock irq_context: 0 psinfo_lock fs_reclaim irq_context: 0 psinfo_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 psinfo_lock pool_lock#2 irq_context: 0 psinfo_lock free_vmap_area_lock irq_context: 0 psinfo_lock vmap_area_lock irq_context: 0 psinfo_lock &____s->seqcount irq_context: 0 psinfo_lock init_mm.page_table_lock irq_context: 0 psinfo_lock (console_sem).lock irq_context: 0 psinfo_lock console_lock console_srcu console_owner_lock irq_context: 0 psinfo_lock console_lock console_srcu console_owner irq_context: 0 psinfo_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 psinfo_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 psinfo_lock pstore_sb_lock irq_context: 0 psinfo_lock dump_list_lock irq_context: 0 psinfo_lock &rq->__lock irq_context: 0 psinfo_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &device->physical_node_lock &rq->__lock irq_context: 0 &device->physical_node_lock &cfs_rq->removed.lock irq_context: 0 &device->physical_node_lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 vsock_register_mutex irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 comedi_drivers_list_lock irq_context: 0 core_lock &rq->__lock irq_context: 0 core_lock &cfs_rq->removed.lock irq_context: 0 core_lock &obj_hash[i].lock irq_context: 0 core_lock pool_lock#2 irq_context: 0 &domain->mutex irq_context: 0 &domain->mutex sparse_irq_lock irq_context: 0 &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 &domain->mutex sparse_irq_lock lock irq_context: 0 &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &domain->mutex sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &domain->mutex fs_reclaim irq_context: 0 &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &domain->mutex pool_lock#2 irq_context: 0 &domain->mutex &irq_desc_lock_class irq_context: 0 &desc->request_mutex irq_context: 0 &desc->request_mutex &irq_desc_lock_class irq_context: 0 &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 cscfg_mutex irq_context: 0 cscfg_mutex fs_reclaim irq_context: 0 cscfg_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 cscfg_mutex pool_lock#2 irq_context: 0 cscfg_mutex &x->wait#9 irq_context: 0 cscfg_mutex &obj_hash[i].lock irq_context: 0 cscfg_mutex &k->list_lock irq_context: 0 cscfg_mutex &rq->__lock irq_context: 0 cscfg_mutex &cfs_rq->removed.lock irq_context: 0 cscfg_mutex lock irq_context: 0 cscfg_mutex lock kernfs_idr_lock irq_context: 0 cscfg_mutex &root->kernfs_rwsem irq_context: 0 cscfg_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cscfg_mutex bus_type_sem irq_context: 0 cscfg_mutex &root->kernfs_rwsem irq_context: 0 cscfg_mutex &c->lock irq_context: 0 cscfg_mutex &____s->seqcount irq_context: 0 cscfg_mutex &dev->power.lock irq_context: 0 cscfg_mutex dpm_list_mtx irq_context: 0 &sb->s_type->i_mutex_key#6/1 &rq->__lock irq_context: 0 fs_reclaim icc_bw_lock irq_context: 0 subsys mutex#71 irq_context: 0 subsys mutex#71 &k->k_lock irq_context: 0 snd_ctl_layer_rwsem irq_context: 0 snd_card_mutex irq_context: 0 snd_ioctl_rwsem irq_context: 0 strings irq_context: 0 strings fs_reclaim irq_context: 0 strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 strings &____s->seqcount irq_context: 0 strings pool_lock#2 irq_context: 0 register_mutex irq_context: 0 sound_mutex irq_context: 0 sound_mutex fs_reclaim irq_context: 0 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sound_mutex pool_lock#2 irq_context: 0 sound_mutex &k->list_lock irq_context: 0 sound_mutex gdp_mutex irq_context: 0 sound_mutex gdp_mutex &k->list_lock irq_context: 0 sound_mutex lock irq_context: 0 sound_mutex lock kernfs_idr_lock irq_context: 0 sound_mutex &root->kernfs_rwsem irq_context: 0 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sound_mutex bus_type_sem irq_context: 0 sound_mutex sysfs_symlink_target_lock irq_context: 0 sound_mutex &root->kernfs_rwsem irq_context: 0 sound_mutex &c->lock irq_context: 0 sound_mutex &____s->seqcount irq_context: 0 sound_mutex &dev->power.lock irq_context: 0 sound_mutex dpm_list_mtx irq_context: 0 sound_mutex req_lock irq_context: 0 sound_mutex &p->pi_lock irq_context: 0 sound_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sound_mutex &p->pi_lock &rq->__lock irq_context: 0 sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sound_mutex &rq->__lock irq_context: 0 sound_mutex &x->wait#11 irq_context: 0 sound_mutex &obj_hash[i].lock irq_context: 0 sound_mutex uevent_sock_mutex irq_context: 0 sound_mutex rcu_read_lock &pool->lock irq_context: 0 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sound_mutex running_helpers_waitq.lock irq_context: 0 sound_mutex subsys mutex#71 irq_context: 0 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 register_mutex#2 irq_context: 0 register_mutex#3 irq_context: 0 register_mutex#3 fs_reclaim irq_context: 0 register_mutex#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#3 pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex irq_context: 0 register_mutex#3 sound_mutex fs_reclaim irq_context: 0 register_mutex#3 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#3 sound_mutex pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex &k->list_lock irq_context: 0 register_mutex#3 sound_mutex gdp_mutex irq_context: 0 register_mutex#3 sound_mutex gdp_mutex &k->list_lock 
irq_context: 0 register_mutex#3 sound_mutex lock irq_context: 0 register_mutex#3 sound_mutex lock kernfs_idr_lock irq_context: 0 register_mutex#3 sound_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_mutex#3 sound_mutex bus_type_sem irq_context: 0 register_mutex#3 sound_mutex sysfs_symlink_target_lock irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#3 sound_mutex &dev->power.lock irq_context: 0 register_mutex#3 sound_mutex dpm_list_mtx irq_context: 0 register_mutex#3 sound_mutex req_lock irq_context: 0 register_mutex#3 sound_mutex &p->pi_lock irq_context: 0 register_mutex#3 sound_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 register_mutex#3 sound_mutex &x->wait#11 irq_context: 0 register_mutex#3 sound_mutex &rq->__lock irq_context: 0 register_mutex#3 sound_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#3 sound_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 register_mutex#3 sound_mutex &obj_hash[i].lock irq_context: 0 register_mutex#3 sound_mutex uevent_sock_mutex irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_mutex#3 sound_mutex running_helpers_waitq.lock irq_context: 0 register_mutex#3 sound_mutex subsys mutex#71 irq_context: 0 register_mutex#3 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 register_mutex#3 &c->lock irq_context: 0 register_mutex#3 &____s->seqcount irq_context: 0 register_mutex#3 clients_lock irq_context: 0 &client->ports_mutex irq_context: 0 &client->ports_mutex &client->ports_lock irq_context: 0 register_mutex#4 irq_context: 0 register_mutex#4 fs_reclaim irq_context: 0 register_mutex#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#4 pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex irq_context: 0 register_mutex#4 sound_oss_mutex fs_reclaim irq_context: 0 register_mutex#4 sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#4 sound_oss_mutex &c->lock irq_context: 0 register_mutex#4 sound_oss_mutex &____s->seqcount irq_context: 0 register_mutex#4 sound_oss_mutex pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex sound_loader_lock irq_context: 0 register_mutex#4 sound_oss_mutex &x->wait#9 irq_context: 0 register_mutex#4 sound_oss_mutex &obj_hash[i].lock irq_context: 0 register_mutex#4 sound_oss_mutex &k->list_lock irq_context: 0 register_mutex#4 sound_oss_mutex gdp_mutex irq_context: 0 register_mutex#4 sound_oss_mutex gdp_mutex &k->list_lock irq_context: 0 register_mutex#4 sound_oss_mutex lock irq_context: 0 register_mutex#4 sound_oss_mutex lock kernfs_idr_lock irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex bus_type_sem irq_context: 0 register_mutex#4 sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex &dev->power.lock irq_context: 0 register_mutex#4 
sound_oss_mutex dpm_list_mtx irq_context: 0 register_mutex#4 sound_oss_mutex req_lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock irq_context: 0 register_mutex#4 sound_oss_mutex &x->wait#11 irq_context: 0 register_mutex#4 sound_oss_mutex &rq->__lock irq_context: 0 register_mutex#4 sound_oss_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#4 sound_oss_mutex uevent_sock_mutex irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_mutex#4 sound_oss_mutex running_helpers_waitq.lock irq_context: 0 register_mutex#4 sound_oss_mutex subsys mutex#71 irq_context: 0 register_mutex#4 sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 clients_lock irq_context: 0 &client->ports_lock irq_context: 0 &grp->list_mutex/1 irq_context: 0 &grp->list_mutex#2 irq_context: 0 &grp->list_mutex#2 &grp->list_lock irq_context: 0 &grp->list_mutex/1 clients_lock irq_context: 0 &grp->list_mutex/1 &client->ports_lock irq_context: 0 &grp->list_mutex/1 register_lock#3 irq_context: 0 &grp->list_mutex/1 fs_reclaim irq_context: 0 &grp->list_mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &grp->list_mutex/1 pool_lock#2 irq_context: 0 &grp->list_mutex/1 &c->lock irq_context: 0 &grp->list_mutex/1 &____s->seqcount irq_context: 0 &dev->mutex snd_card_mutex irq_context: 0 &dev->mutex &entry->access irq_context: 0 &dev->mutex info_mutex irq_context: 0 &dev->mutex info_mutex proc_subdir_lock irq_context: 0 &dev->mutex info_mutex fs_reclaim irq_context: 0 &dev->mutex info_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex info_mutex pool_lock#2 irq_context: 0 &dev->mutex info_mutex proc_inum_ida.xa_lock irq_context: 0 &dev->mutex info_mutex proc_subdir_lock irq_context: 0 &dev->mutex &card->controls_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem &card->ctl_files_rwlock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem irq_context: 0 &dev->mutex subsys mutex#71 irq_context: 0 &dev->mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 irq_context: 0 &dev->mutex register_mutex#2 fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 pool_lock#2 irq_context: 0 
&dev->mutex register_mutex#2 sound_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_mutex fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 sound_mutex pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &k->list_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex bus_type_sem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &dev->power.lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex dpm_list_mtx irq_context: 0 &dev->mutex register_mutex#2 sound_mutex req_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &x->wait#11 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events async_lookup_work irq_context: 0 (wq_completion)events async_lookup_work fs_reclaim irq_context: 0 (wq_completion)events async_lookup_work fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events async_lookup_work pool_lock#2 irq_context: 0 (wq_completion)events async_lookup_work clients_lock irq_context: 0 (wq_completion)events async_lookup_work &client->ports_lock irq_context: 0 (wq_completion)events async_lookup_work register_lock#3 irq_context: 0 (wq_completion)events async_lookup_work snd_card_mutex irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events async_lookup_work autoload_work irq_context: 0 (wq_completion)events async_lookup_work &x->wait#10 irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events async_lookup_work &rq->__lock irq_context: 0 (wq_completion)events async_lookup_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events autoload_work irq_context: 0 (wq_completion)events autoload_work &k->list_lock irq_context: 0 (wq_completion)events autoload_work &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events async_lookup_work &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_mutex uevent_sock_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex subsys mutex#71 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex register_mutex#2 register_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &c->lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 &c->lock irq_context: 0 &dev->mutex register_mutex#2 &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex sound_loader_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &x->wait#9 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &k->list_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex bus_type_sem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &dev->power.lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex dpm_list_mtx irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex req_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex 
register_mutex#2 sound_oss_mutex &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &x->wait#11 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex uevent_sock_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex subsys mutex#71 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &c->lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 strings irq_context: 0 &dev->mutex register_mutex#2 strings fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 strings pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 &entry->access irq_context: 0 &dev->mutex register_mutex#2 info_mutex irq_context: 0 &dev->mutex sound_mutex irq_context: 0 &dev->mutex sound_mutex fs_reclaim irq_context: 0 &dev->mutex sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex sound_mutex pool_lock#2 irq_context: 0 &dev->mutex sound_mutex &k->list_lock irq_context: 0 &dev->mutex sound_mutex lock irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex sound_mutex bus_type_sem irq_context: 0 &dev->mutex sound_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex sound_mutex &c->lock irq_context: 0 &dev->mutex sound_mutex &____s->seqcount irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_mutex &dev->power.lock irq_context: 0 &dev->mutex sound_mutex dpm_list_mtx irq_context: 0 &dev->mutex sound_mutex req_lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_mutex &rq->__lock irq_context: 0 &dev->mutex sound_mutex &x->wait#11 irq_context: 0 &dev->mutex sound_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sound_mutex uevent_sock_mutex irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex 
sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex sound_mutex &k->k_lock irq_context: 0 &dev->mutex sound_mutex subsys mutex#71 irq_context: 0 &dev->mutex sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex &card->controls_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem snd_ctl_led_mutex irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &x->wait#9 irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &k->list_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem bus_type_sem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &dev->power.lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem dpm_list_mtx irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &k->k_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex info_mutex &c->lock irq_context: 0 &dev->mutex info_mutex &____s->seqcount irq_context: 0 &dev->mutex sound_oss_mutex irq_context: 0 &dev->mutex sound_oss_mutex fs_reclaim irq_context: 0 &dev->mutex sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex sound_oss_mutex pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex sound_loader_lock irq_context: 0 &dev->mutex sound_oss_mutex &c->lock irq_context: 0 &dev->mutex sound_oss_mutex &____s->seqcount irq_context: 0 &dev->mutex sound_oss_mutex &x->wait#9 irq_context: 0 &dev->mutex sound_oss_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sound_oss_mutex &k->list_lock irq_context: 0 &dev->mutex sound_oss_mutex lock irq_context: 0 &dev->mutex sound_oss_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex sound_oss_mutex bus_type_sem irq_context: 0 &dev->mutex sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_oss_mutex 
&dev->power.lock irq_context: 0 &dev->mutex sound_oss_mutex dpm_list_mtx irq_context: 0 &dev->mutex sound_oss_mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex sound_oss_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex sound_oss_mutex req_lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_oss_mutex &rq->__lock irq_context: 0 &dev->mutex sound_oss_mutex &x->wait#11 irq_context: 0 &dev->mutex sound_oss_mutex uevent_sock_mutex irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_oss_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex sound_oss_mutex &k->k_lock irq_context: 0 &dev->mutex sound_oss_mutex subsys mutex#71 irq_context: 0 &dev->mutex sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex strings irq_context: 0 &dev->mutex strings fs_reclaim irq_context: 0 &dev->mutex strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex strings pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &pcp->lock &zone->lock irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 &pcp->lock &zone->lock irq_context: 0 &dev->mutex register_mutex#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex sound_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &dev->mutex register_mutex#5 irq_context: 0 &dev->mutex register_mutex#3 irq_context: 0 &dev->mutex register_mutex#3 fs_reclaim irq_context: 0 &dev->mutex register_mutex#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#3 pool_lock#2 irq_context: 0 &dev->mutex register_mutex#3 clients_lock irq_context: 0 &dev->mutex clients_lock irq_context: 0 &dev->mutex &client->ports_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 irq_context: 0 &dev->mutex &grp->list_mutex/1 clients_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 &client->ports_lock irq_context: 0 &dev->mutex &client->ports_mutex irq_context: 0 &dev->mutex &client->ports_mutex &client->ports_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 register_lock#3 irq_context: 0 &dev->mutex &grp->list_mutex/1 fs_reclaim irq_context: 0 &dev->mutex 
&grp->list_mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &grp->list_mutex/1 &____s->seqcount irq_context: 0 &dev->mutex &grp->list_mutex/1 pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex sound_oss_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &grp->list_mutex/1 &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &pcp->lock &zone->lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex client_mutex irq_context: 0 &dev->mutex client_mutex fs_reclaim irq_context: 0 &dev->mutex client_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex client_mutex pool_lock#2 irq_context: 0 &dev->mutex client_mutex &dev->devres_lock irq_context: 0 &wq->mutex &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex failover_lock irq_context: 0 llc_sap_list_lock irq_context: 0 llc_sap_list_lock pool_lock#2 irq_context: 0 act_id_mutex irq_context: 0 act_id_mutex fs_reclaim irq_context: 0 act_id_mutex fs_reclaim &rq->__lock irq_context: 0 act_id_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 act_id_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 act_id_mutex fs_reclaim pool_lock#2 irq_context: 0 act_id_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 act_id_mutex pool_lock#2 irq_context: 0 act_mod_lock irq_context: 0 ife_mod_lock irq_context: 0 rtnl_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex kfence_freelist_lock irq_context: 0 cls_mod_lock irq_context: 0 ematch_mod_lock irq_context: 0 sock_diag_table_mutex irq_context: 0 nfnl_subsys_acct irq_context: 0 nfnl_subsys_queue irq_context: 0 nfnl_subsys_ulog irq_context: 0 nf_log_mutex irq_context: 0 nfnl_subsys_osf irq_context: 0 nf_sockopt_mutex irq_context: 0 nfnl_subsys_ctnetlink irq_context: 0 nfnl_subsys_ctnetlink_exp irq_context: 0 pernet_ops_rwsem nf_ct_ecache_mutex irq_context: 0 nfnl_subsys_cttimeout irq_context: 0 nfnl_subsys_cthelper irq_context: 0 nf_ct_helper_mutex irq_context: 0 pernet_ops_rwsem nf_log_mutex irq_context: 0 nf_conntrack_expect_lock irq_context: 0 nf_ct_nat_helpers_mutex irq_context: 0 nfnl_subsys_nftables irq_context: 0 nfnl_subsys_nftcompat irq_context: 0 masq_mutex irq_context: 0 masq_mutex pernet_ops_rwsem irq_context: 0 masq_mutex pernet_ops_rwsem rtnl_mutex irq_context: 0 masq_mutex (inetaddr_chain).rwsem irq_context: 0 masq_mutex inet6addr_chain.lock irq_context: 0 &xt[i].mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 subsys mutex#72 irq_context: 0 subsys mutex#72 &k->k_lock irq_context: 0 nfnl_subsys_ipset irq_context: 0 ip_set_type_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem &base->lock irq_context: 0 pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 ip_vs_sched_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 
pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 ip_vs_pe_mutex irq_context: 0 tunnel4_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 xfrm4_protocol_mutex irq_context: 0 &xt[i].mutex fs_reclaim irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &xt[i].mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock &c->lock irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock pool_lock#2 irq_context: 0 inet_diag_table_mutex irq_context: 0 xfrm_km_lock irq_context: 0 xfrm6_protocol_mutex irq_context: 0 tunnel6_mutex irq_context: 0 xfrm_if_cb_lock irq_context: 0 inetsw6_lock irq_context: 0 &hashinfo->lock#2 irq_context: 0 pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 pernet_ops_rwsem rtnl_mutex &base->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock crngs.lock irq_context: 0 (crypto_chain).rwsem irq_context: 0 (crypto_chain).rwsem fs_reclaim irq_context: 0 (crypto_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (crypto_chain).rwsem &c->lock irq_context: 0 (crypto_chain).rwsem &____s->seqcount irq_context: 0 (crypto_chain).rwsem pool_lock#2 irq_context: 0 (crypto_chain).rwsem kthread_create_lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (crypto_chain).rwsem &x->wait irq_context: 0 (crypto_chain).rwsem &rq->__lock irq_context: 0 (crypto_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 
0 (crypto_chain).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (crypto_chain).rwsem &obj_hash[i].lock irq_context: 0 &x->wait#20 irq_context: 0 &x->wait#20 &p->pi_lock irq_context: 0 &p->alloc_lock &x->wait irq_context: 0 &xt[i].mutex &c->lock irq_context: 0 &xt[i].mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 stp_proto_mutex irq_context: 0 stp_proto_mutex llc_sap_list_lock irq_context: 0 stp_proto_mutex llc_sap_list_lock pool_lock#2 irq_context: 0 switchdev_notif_chain.lock irq_context: 0 (switchdev_blocking_notif_chain).rwsem irq_context: 0 br_ioctl_mutex irq_context: 0 nf_ct_proto_mutex irq_context: 0 ebt_mutex irq_context: 0 ebt_mutex fs_reclaim irq_context: 0 ebt_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ebt_mutex pool_lock#2 irq_context: 0 dsa_tag_drivers_lock irq_context: 0 rtnl_mutex &tn->lock irq_context: 0 protocol_list_lock irq_context: 0 linkfail_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rose_neigh_list_lock irq_context: 0 proto_tab_lock#2 irq_context: 0 bt_proto_lock irq_context: 0 bt_proto_lock pool_lock#2 irq_context: 0 bt_proto_lock &dir->lock irq_context: 0 bt_proto_lock &obj_hash[i].lock irq_context: 0 bt_proto_lock chan_list_lock irq_context: 0 bt_proto_lock &obj_hash[i].lock pool_lock irq_context: 0 bt_proto_lock l2cap_sk_list.lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP slock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP chan_list_lock irq_context: 0 slock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 rfcomm_wq.lock irq_context: 0 rfcomm_mutex irq_context: 0 auth_domain_lock irq_context: 0 registered_mechs_lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &rq->__lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (crypto_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 (crypto_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#20 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#20 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 atm_dev_notify_chain.lock irq_context: 0 genl_mutex irq_context: 0 proto_tab_lock#3 irq_context: 0 vlan_ioctl_mutex irq_context: 0 pernet_ops_rwsem (console_sem).lock irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner_lock irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner &port_lock_key irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner console_owner_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_read_lock &ndev->lock irq_context: 0 rds_info_lock irq_context: 0 rds_trans_sem irq_context: 0 rds_trans_sem (console_sem).lock irq_context: 0 rds_trans_sem console_lock console_srcu console_owner_lock irq_context: 0 rds_trans_sem console_lock console_srcu console_owner irq_context: 0 rds_trans_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 rds_trans_sem 
console_lock console_srcu console_owner console_owner_lock irq_context: 0 &id_priv->lock irq_context: 0 lock#7 irq_context: 0 lock#7 fs_reclaim irq_context: 0 lock#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 lock#7 pool_lock#2 irq_context: 0 lock#7 &xa->xa_lock#15 irq_context: 0 lock#7 &xa->xa_lock#15 pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem wq_pool_mutex irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem &list->lock#4 irq_context: 0 pernet_ops_rwsem &dir->lock#2 irq_context: 0 pernet_ops_rwsem ptype_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-clock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 smc_wr_rx_hash_lock irq_context: 0 v9fs_trans_lock irq_context: 0 pernet_ops_rwsem &this->receive_lock irq_context: 0 &x->wait#16 &p->pi_lock irq_context: 0 &x->wait#16 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#16 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
lowpan_nhc_lock irq_context: 0 rcu_read_lock quarantine_lock irq_context: 0 ovs_mutex irq_context: 0 pernet_ops_rwsem once_lock irq_context: 0 pernet_ops_rwsem once_lock crngs.lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 
irq_context: 0 kernfs_idr_lock &obj_hash[i].lock irq_context: 0 kernfs_idr_lock pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock pool_lock irq_context: 0 kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock purge_vmap_area_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 tasklist_lock &base->lock irq_context: 0 tasklist_lock &base->lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock pool_lock#2 irq_context: 0 uevent_sock_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 uevent_sock_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 uevent_sock_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (debug_obj_work).work pool_lock#2 irq_context: 0 &root->kernfs_rwsem &meta->lock irq_context: 0 &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 
&root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key_types_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem fs_reclaim irq_context: 0 key_types_sem asymmetric_key_parsers_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem asymmetric_key_parsers_sem pool_lock#2 irq_context: 0 key_types_sem asymmetric_key_parsers_sem &c->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &____s->seqcount irq_context: 0 key_types_sem asymmetric_key_parsers_sem crypto_alg_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem crypto_alg_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem fs_reclaim irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem pool_lock#2 irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem kthread_create_lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &p->pi_lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &p->pi_lock &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &x->wait irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &x->wait#20 irq_context: 0 key_types_sem asymmetric_key_parsers_sem &base->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &base->lock &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key_types_sem asymmetric_key_parsers_sem (&timer.timer) irq_context: 0 key_types_sem &type->lock_class irq_context: 0 key_types_sem &type->lock_class fs_reclaim irq_context: 0 key_types_sem &type->lock_class fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem &type->lock_class pool_lock#2 irq_context: 0 key_types_sem &type->lock_class key_user_lock irq_context: 0 key_types_sem &type->lock_class crngs.lock irq_context: 0 key_types_sem &type->lock_class key_serial_lock irq_context: 0 key_types_sem &type->lock_class key_construction_mutex irq_context: 0 key_types_sem &type->lock_class key_construction_mutex &obj_hash[i].lock irq_context: 0 key_types_sem &type->lock_class key_construction_mutex pool_lock#2 irq_context: 0 key_types_sem &type->lock_class ima_keys_lock irq_context: 0 key_types_sem &obj_hash[i].lock irq_context: 0 key_types_sem pool_lock#2 irq_context: 0 slab_mutex lock irq_context: 0 slab_mutex lock kernfs_idr_lock irq_context: 0 slab_mutex &root->kernfs_rwsem irq_context: 0 slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 slab_mutex &k->list_lock irq_context: 0 slab_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 
slab_mutex lock kernfs_idr_lock &c->lock irq_context: 0 slab_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 slab_mutex &obj_hash[i].lock irq_context: 0 slab_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 slab_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 slab_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 slab_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 slab_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 slab_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &pcp->lock &zone->lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex crypto_alg_sem irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock free_vmap_area_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock vmap_area_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock init_mm.page_table_lock irq_context: 0 &mm->page_table_lock irq_context: 0 ptlock_ptr(ptdesc)#2 irq_context: 0 rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex crngs.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &dir->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 
pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock irq_context: 0 (wq_completion)events netstamp_work irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex kthread_create_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait#21 irq_context: 0 &x->wait#21 irq_context: 0 &x->wait#21 &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &local->services_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 pernet_ops_rwsem &call->waitq irq_context: 0 pernet_ops_rwsem &rx->call_lock irq_context: 0 pernet_ops_rwsem &rxnet->call_lock irq_context: 0 bio_slab_lock slab_mutex &root->kernfs_rwsem irq_context: 0 bio_slab_lock slab_mutex &k->list_lock irq_context: 0 bio_slab_lock slab_mutex lock irq_context: 0 bio_slab_lock slab_mutex lock kernfs_idr_lock irq_context: 0 bio_slab_lock slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 bio_slab_lock slab_mutex lock kernfs_idr_lock pool_lock#2 irq_context: softirq (&rxnet->peer_keepalive_timer) irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 init_user_ns.keyring_sem irq_context: 0 init_user_ns.keyring_sem key_user_lock irq_context: 0 init_user_ns.keyring_sem root_key_user.lock irq_context: 0 init_user_ns.keyring_sem fs_reclaim irq_context: 0 init_user_ns.keyring_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 init_user_ns.keyring_sem 
pool_lock#2 irq_context: 0 init_user_ns.keyring_sem crngs.lock irq_context: 0 init_user_ns.keyring_sem key_serial_lock irq_context: 0 init_user_ns.keyring_sem key_construction_mutex irq_context: 0 init_user_ns.keyring_sem &type->lock_class irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock fs_reclaim irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock pool_lock#2 irq_context: 0 (wq_completion)krxrpcd irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &c->lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &____s->seqcount irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock root_key_user.lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &rxnet->peer_hash_lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &obj_hash[i].lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &base->lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex keyring_name_lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &base->lock &obj_hash[i].lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex &obj_hash[i].lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex pool_lock#2 irq_context: 0 init_user_ns.keyring_sem keyring_serialise_link_lock irq_context: 0 init_user_ns.keyring_sem key_construction_mutex keyring_name_lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &obj_hash[i].lock irq_context: 0 template_list irq_context: 0 idr_lock irq_context: 0 key_types_sem &type->lock_class &c->lock irq_context: 0 key_types_sem &type->lock_class &____s->seqcount irq_context: 0 ima_extend_list_mutex irq_context: 0 ima_extend_list_mutex fs_reclaim irq_context: 0 ima_extend_list_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ima_extend_list_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 pci_bus_sem irq_context: 0 clk_debug_lock pin_fs_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 clk_debug_lock 
&sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound deferred_probe_work irq_context: 0 (wq_completion)events_unbound deferred_probe_work deferred_probe_mutex irq_context: 0 deferred_probe_work irq_context: 0 &x->wait#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 console_mutex &root->kernfs_rwsem irq_context: 0 console_mutex kernfs_notify_lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 console_mutex (console_sem).lock irq_context: 0 console_mutex console_lock console_srcu console_owner_lock irq_context: 0 console_mutex console_lock console_srcu console_owner irq_context: 0 console_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 console_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 k-sk_lock-AF_INET irq_context: 0 k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 k-slock-AF_INET#2 irq_context: 0 k-sk_lock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 k-slock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 power_off_handler_list.lock irq_context: 0 reg_requests_lock irq_context: 0 (wq_completion)events reg_work irq_context: 0 (wq_completion)events reg_work rtnl_mutex irq_context: 0 (wq_completion)events reg_work rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events reg_work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events reg_work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex 
&obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex reg_pending_beacons_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &base->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &fw_cache.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &fw_cache.lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) async_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock init_fs.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock init_fs.lock &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rcu_read_lock &____s->seqcount#6 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events 
(work_completion)(&fw_work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) (console_sem).lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem usermodehelper_disabled_waitq.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &x->wait#9 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem lock kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem bus_type_sem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem sysfs_symlink_target_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem irq_context: 0 (wq_completion)events 
(work_completion)(&fw_work->work) umhelper_sem &dev->power.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dpm_list_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fw_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem running_helpers_waitq.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &x->wait#22 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &base->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 detector_work irq_context: 0 &wq->mutex &x->wait#10 irq_context: 0 rcu_read_lock &pool->lock (worker)->lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 acpi_gpio_deferred_req_irqs_lock irq_context: 0 gpd_list_lock irq_context: 0 rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 rcu_state.exp_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key#2 irq_context: 0 tomoyo_ss &c->lock irq_context: 0 tomoyo_ss &____s->seqcount irq_context: 0 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 tomoyo_ss &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 tomoyo_ss tomoyo_log_lock irq_context: 0 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 cdev_lock irq_context: 0 tty_mutex (console_sem).lock irq_context: 0 tty_mutex console_lock irq_context: 0 tty_mutex fs_reclaim irq_context: 0 tty_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tty_mutex pool_lock#2 irq_context: 0 tty_mutex tty_ldiscs_lock irq_context: 0 tty_mutex &obj_hash[i].lock irq_context: 0 tty_mutex &obj_hash[i].lock pool_lock irq_context: 0 tty_mutex &k->list_lock irq_context: 0 tty_mutex &k->k_lock irq_context: 0 tty_mutex &tty->legacy_mutex irq_context: 0 tty_mutex 
&tty->legacy_mutex &tty->read_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->write_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem pool_lock#2 irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem free_vmap_area_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem vmap_area_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &____s->seqcount irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem init_mm.page_table_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->write_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->read_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->legacy_mutex irq_context: 0 &tty->legacy_mutex &tty->files_lock irq_context: 0 &tty->legacy_mutex &port->lock irq_context: 0 &tty->legacy_mutex &port->mutex irq_context: 0 &tty->legacy_mutex &port->mutex fs_reclaim irq_context: 0 &tty->legacy_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &port->mutex &____s->seqcount irq_context: 0 &tty->legacy_mutex &port->mutex &port_lock_key irq_context: 0 &tty->legacy_mutex &port->mutex pool_lock#2 irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock fs_reclaim irq_context: 0 &tty->legacy_mutex &port->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &port->mutex register_lock pool_lock#2 irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock &c->lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock &____s->seqcount irq_context: 0 &tty->legacy_mutex &port->mutex &irq_desc_lock_class irq_context: 0 &tty->legacy_mutex &port->mutex proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex proc_inum_ida.xa_lock irq_context: 0 &tty->legacy_mutex &port->mutex proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port_lock_key irq_context: 0 sb_writers#2 irq_context: 0 sb_writers#2 mount_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 rename_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 rcu_read_lock &dentry->d_lock 
irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &s->s_inode_list_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tk_core.seq.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &wq#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key tk_core.seq.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &c->lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &____s->seqcount irq_context: 0 tomoyo_ss file_systems_lock irq_context: 0 tomoyo_ss fs_reclaim irq_context: 0 tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss rcu_read_lock init_fs.seq.seqcount irq_context: 0 &type->s_umount_key#24/1 irq_context: 0 &type->s_umount_key#24/1 fs_reclaim irq_context: 0 &type->s_umount_key#24/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#24/1 &c->lock irq_context: 0 &type->s_umount_key#24/1 &____s->seqcount irq_context: 0 &type->s_umount_key#24/1 pool_lock#2 irq_context: 0 &type->s_umount_key#24/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#24/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#24/1 shrinker_mutex irq_context: 0 &type->s_umount_key#24/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#24/1 sb_lock irq_context: 0 &type->s_umount_key#24/1 inode_hash_lock 
irq_context: 0 &type->s_umount_key#24/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#24/1 bdev_lock irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#24/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#24/1 &wq->mutex irq_context: 0 &type->s_umount_key#24/1 &wq->mutex &pool->lock irq_context: 0 &type->s_umount_key#24/1 kthread_create_lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#24/1 &x->wait irq_context: 0 &type->s_umount_key#24/1 &rq->__lock irq_context: 0 &type->s_umount_key#24/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#24/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex &wq->mutex irq_context: 0 &type->s_umount_key#24/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#24/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#24/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#24/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->s_umount_key#24/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#24/1 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#24/1 lock#4 irq_context: 0 &type->s_umount_key#24/1 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#24/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#24/1 bit_wait_table + i irq_context: hardirq bit_wait_table + i irq_context: hardirq bit_wait_table + i &p->pi_lock irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &type->s_umount_key#24/1 &wq->mutex &x->wait#10 irq_context: 0 &type->s_umount_key#24/1 wq_mayday_lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#24/1 &sbi->old_work_lock irq_context: 0 &type->s_umount_key#24/1 (work_completion)(&(&sbi->old_work)->work) irq_context: 0 &type->s_umount_key#24/1 &x->wait#23 irq_context: 0 &disk->open_mutex bdev_lock irq_context: 0 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &xa->xa_lock#5 irq_context: 0 sb_lock &obj_hash[i].lock irq_context: 0 sb_lock pool_lock#2 irq_context: 0 &type->s_umount_key#25/1 irq_context: 0 &type->s_umount_key#25/1 fs_reclaim irq_context: 0 
&type->s_umount_key#25/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 pool_lock#2 irq_context: 0 &type->s_umount_key#25/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#25/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#25/1 shrinker_mutex irq_context: 0 &type->s_umount_key#25/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#25/1 sb_lock irq_context: 0 &type->s_umount_key#25/1 inode_hash_lock irq_context: 0 &type->s_umount_key#25/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#25/1 bdev_lock irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#25/1 &c->lock irq_context: 0 &type->s_umount_key#25/1 &____s->seqcount irq_context: 0 &type->s_umount_key#25/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#25/1 lock#4 irq_context: 0 &type->s_umount_key#25/1 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#25/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#25/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#25/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#25/1 bit_wait_table + i irq_context: 0 &type->s_umount_key#25/1 &rq->__lock irq_context: 0 &type->s_umount_key#25/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq bit_wait_table + i &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#25/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#25/1 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#25/1 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->s_umount_key#25/1 lock#5 irq_context: 0 &type->s_umount_key#25/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#25/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#25/1 crypto_alg_sem irq_context: 0 &type->s_umount_key#25/1 lock#2 irq_context: 0 &type->s_umount_key#25/1 lock#2 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#4 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 lock#2 &rq->__lock irq_context: 0 &type->s_umount_key#25/1 lock#2 (work_completion)(work) irq_context: 0 &type->s_umount_key#25/1 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#25/1 &x->wait#23 irq_context: 0 &type->s_umount_key#26/1 irq_context: 0 &type->s_umount_key#26/1 fs_reclaim irq_context: 0 &type->s_umount_key#26/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#26/1 pcpu_alloc_mutex 
pcpu_lock irq_context: 0 &type->s_umount_key#26/1 &c->lock irq_context: 0 &type->s_umount_key#26/1 &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 shrinker_mutex irq_context: 0 &type->s_umount_key#26/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#26/1 sb_lock irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#26/1 bdev_lock irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#26/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#26/1 lock#4 irq_context: 0 &type->s_umount_key#26/1 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#26/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#26/1 bit_wait_table + i irq_context: 0 &type->s_umount_key#26/1 &rq->__lock irq_context: 0 &type->s_umount_key#26/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#26/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#26/1 lock#4 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#26/1 lock#5 irq_context: 0 &type->s_umount_key#26/1 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#26/1 crypto_alg_sem irq_context: 0 &type->s_umount_key#26/1 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 percpu_counters_lock irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#26/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#26/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &retval->lock irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq irq_context: 0 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount 
irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key namespace_sem &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem &obj_hash[i].lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &pcp->lock &zone->lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 rcu_state.barrier_mutex irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: hardirq rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex &x->wait#24 irq_context: 0 rcu_state.barrier_mutex &rq->__lock irq_context: 0 rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &x->wait#24 irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#3 lock#4 &lruvec->lru_lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &retval->lock irq_context: 0 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq#2 irq_context: 0 &mm->mmap_lock fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock irq_context: 0 &mm->mmap_lock fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->page_table_lock irq_context: 0 &mm->mmap_lock &c->lock irq_context: 0 &mm->mmap_lock pool_lock#2 irq_context: 0 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &mm->page_table_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex irq_context: 0 &sig->cred_guard_mutex fs_reclaim irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex pool_lock#2 irq_context: 0 &sig->cred_guard_mutex init_fs.lock irq_context: 0 &sig->cred_guard_mutex &p->pi_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock lock#4 irq_context: 0 
&sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 &sig->cred_guard_mutex &folio_wait_table[i] irq_context: 0 &sig->cred_guard_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock init_fs.seq.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_log_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_log_wait.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss quarantine_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex binfmt_lock irq_context: 0 &sig->cred_guard_mutex init_binfmt_misc.entries_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock 
init_fs.seq.seqcount irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem lock#4 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq irq_context: 0 &sig->cred_guard_mutex &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex 
&type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq#2 irq_context: 0 &sig->cred_guard_mutex &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &c->lock irq_context: 0 &sig->cred_guard_mutex &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &retval->lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->alloc_lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &sighand->siglock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &newf->file_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock batched_entropy_u64.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock batched_entropy_u64.lock crngs.lock irq_context: 0 batched_entropy_u16.lock irq_context: 0 batched_entropy_u16.lock crngs.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &____s->seqcount irq_context: 0 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock ptlock_ptr(ptdesc)#2/1 irq_context: 0 &mm->mmap_lock lock#4 irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock lock#5 irq_context: 0 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &mm->page_table_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 &mm->mmap_lock &c->lock irq_context: 0 mapping.invalidate_lock irq_context: 0 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock &____s->seqcount irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 irq_context: 0 mapping.invalidate_lock lock#4 irq_context: 0 mapping.invalidate_lock lock#4 &lruvec->lru_lock irq_context: 0 mapping.invalidate_lock &ei->i_es_lock irq_context: 0 mapping.invalidate_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock &c->lock irq_context: 0 mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock rcu_read_lock &retval->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] irq_context: 0 
mapping.invalidate_lock &rq->__lock irq_context: 0 mapping.invalidate_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &rq->__lock pool_lock#2 irq_context: 0 binfmt_lock irq_context: 0 &fsnotify_mark_srcu irq_context: 0 &xa->xa_lock#9 irq_context: 0 mapping.invalidate_lock rcu_read_lock &c->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock fs_reclaim irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock &wq irq_context: 0 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock &wq#2 irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem irq_context: 0 mapping.invalidate_lock &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock &ei->i_data_sem pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock mapping.invalidate_lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &c->lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &____s->seqcount irq_context: 0 mapping.invalidate_lock batched_entropy_u8.lock irq_context: 0 mapping.invalidate_lock kfence_freelist_lock irq_context: softirq &(&gc_work->dwork)->timer irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &____s->seqcount#7 irq_context: softirq &(&ipvs->defense_work)->timer irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
(wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &s->s_inode_list_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &ipvs->dropentry_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &ipvs->droppacket_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &ipvs->securetcp_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &____s->seqcount irq_context: 0 mapping.invalidate_lock &ei->i_es_lock key#2 irq_context: 0 &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 crngs.lock base_crng.lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 tomoyo_ss quarantine_lock irq_context: 0 &port->mutex irq_context: 0 &tty->ldisc_sem irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &mm->mmap_lock irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &port->mutex irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->read_wait irq_context: 0 task_group_lock irq_context: 0 &sighand->siglock &p->pi_lock irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 lock#4 irq_context: 0 &type->s_umount_key#27/1 irq_context: 0 &type->s_umount_key#27/1 fs_reclaim irq_context: 0 &type->s_umount_key#27/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 pool_lock#2 
irq_context: 0 &type->s_umount_key#27/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#27/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#27/1 shrinker_mutex irq_context: 0 &type->s_umount_key#27/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#27/1 sb_lock irq_context: 0 &type->s_umount_key#27/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#27/1 &c->lock irq_context: 0 &type->s_umount_key#27/1 &____s->seqcount irq_context: 0 &type->s_umount_key#27/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#27/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#27/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 irq_context: 0 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 proc_subdir_lock irq_context: 0 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 &p->lock irq_context: 0 &p->lock fs_reclaim irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock pool_lock#2 irq_context: 0 &p->lock &mm->mmap_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 &type->s_umount_key#28/1 irq_context: 0 &type->s_umount_key#28/1 fs_reclaim irq_context: 0 &type->s_umount_key#28/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 pool_lock#2 irq_context: 0 &type->s_umount_key#28/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#28/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#28/1 shrinker_mutex irq_context: 0 &type->s_umount_key#28/1 list_lrus_mutex irq_context: 
0 &type->s_umount_key#28/1 sb_lock irq_context: 0 &type->s_umount_key#28/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 &type->s_umount_key#28/1 crngs.lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#28/1 &dentry->d_lock irq_context: 0 &root->kernfs_iattr_rwsem irq_context: 0 &type->i_mutex_dir_key#4 irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex irq_context: 0 &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq irq_context: 0 
&ent->pde_unload_lock irq_context: 0 &p->lock &c->lock irq_context: 0 &p->lock &____s->seqcount irq_context: 0 &p->lock file_systems_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->s_umount_key#29 irq_context: 0 &type->s_umount_key#29 &x->wait#23 irq_context: 0 &type->s_umount_key#29 shrinker_mutex irq_context: 0 &type->s_umount_key#29 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#29 pool_lock#2 irq_context: 0 &type->s_umount_key#29 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#29 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->s_umount_key#29 rename_lock.seqcount irq_context: 0 &type->s_umount_key#29 &dentry->d_lock irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 &type->s_umount_key#29 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#29 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#29 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#29 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#29 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#29 &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#29 &obj_hash[i].lock pool_lock irq_context: 0 unnamed_dev_ida.xa_lock irq_context: 0 krc.lock irq_context: 0 &x->wait#25 irq_context: 0 &net->unx.table.locks[i] irq_context: 0 &sb->s_type->i_lock_key#8 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#10 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->unx.table.locks[i] irq_context: 0 &sb->s_type->i_mutex_key#10 &u->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &u->lock clock-AF_UNIX irq_context: 0 &sb->s_type->i_mutex_key#10 &u->peer_wait irq_context: 0 &sb->s_type->i_mutex_key#10 rlock-AF_UNIX irq_context: 0 &sb->s_type->i_mutex_key#10 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &obj_hash[i].lock irq_context: 0 &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &fs->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &____s->seqcount#3 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &____s->seqcount#3 irq_context: 0 &sig->cred_guard_mutex lock#4 irq_context: 0 &sig->cred_guard_mutex &sb->s_type->i_mutex_key#8 irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 irq_context: 0 
&sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->alloc_lock cpu_asid_lock irq_context: 0 &sig->wait_chldexit irq_context: 0 tasklist_lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock irq_context: 0 &mm->mmap_lock &p->alloc_lock irq_context: 0 &mm->mmap_lock lock#4 irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock lock#5 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &lruvec->lru_lock irq_context: 0 tasklist_lock &sighand->siglock &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &c->lock irq_context: 0 tasklist_lock &sighand->siglock pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &____s->seqcount#4 irq_context: 0 &prev->lock irq_context: 0 &(&sig->stats_lock)->lock irq_context: 0 &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 irq_context: 0 sb_writers#3 mount_lock irq_context: 0 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 mount_lock irq_context: 0 sb_writers#4 tk_core.seq.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &wb->list_lock irq_context: 0 sb_writers#4 &wb->list_lock &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pid->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq irq_context: 0 &p->alloc_lock &fs->lock &dentry->d_lock irq_context: 0 &p->lock namespace_sem irq_context: 0 &p->lock namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &p->lock namespace_sem rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &type->s_umount_key#30 irq_context: 0 &type->s_umount_key#30 &lru->node[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 
&dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 &lru->node[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rsp->gp_wait irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &x->wait#2 irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback put_task_map-wait-type-override task_group_lock irq_context: softirq rcu_callback &x->wait#2 irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 &sem->waiters irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait pool_lock#2 irq_context: 0 &type->s_umount_key#30 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 &p->alloc_lock irq_context: 0 &type->s_umount_key#30 (work_completion)(&sbi->s_sb_upd_work) irq_context: 0 &type->s_umount_key#30 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 key#3 irq_context: 0 &type->s_umount_key#30 key#4 irq_context: 0 &type->s_umount_key#30 &sbi->s_error_lock irq_context: 0 &type->s_umount_key#30 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 batched_entropy_u8.lock irq_context: 0 &type->s_umount_key#30 kfence_freelist_lock irq_context: 0 &type->s_umount_key#30 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &base->lock irq_context: 0 &type->s_umount_key#30 &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 bit_wait_table + i irq_context: 0 &type->s_umount_key#30 &rq->__lock irq_context: 0 &type->s_umount_key#30 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 ext4_li_mtx irq_context: 0 &type->s_umount_key#30 ext4_li_mtx fs_reclaim irq_context: 0 &type->s_umount_key#30 ext4_li_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 ext4_li_mtx pool_lock#2 irq_context: 0 &type->s_umount_key#30 ext4_li_mtx batched_entropy_u16.lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &eli->li_list_mtx irq_context: 0 &type->s_umount_key#30 ext4_li_mtx kthread_create_lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &p->pi_lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &x->wait irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &rq->__lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &obj_hash[i].lock irq_context: 0 &eli->li_list_mtx 
irq_context: 0 &type->s_umount_key#30 (console_sem).lock irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner_lock irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#30 mount_lock irq_context: 0 &type->s_umount_key#30 mount_lock mount_lock.seqcount irq_context: 0 &type->s_umount_key#30 mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &pid->lock irq_context: 0 rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 rename_lock.seqcount irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 &pid->lock irq_context: 0 sb_writers#3 tk_core.seq.seqcount irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &____s->seqcount irq_context: 0 sb_writers#3 &c->lock irq_context: 0 sb_writers#3 pool_lock#2 irq_context: 0 sb_writers#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 jbd2_handle &c->lock irq_context: 0 sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &wb->list_lock irq_context: 0 sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &wb->work_lock irq_context: 0 sb_writers#3 &wb->work_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &wb->work_lock &base->lock irq_context: 0 sb_writers#3 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &c->lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &cfs_rq->removed.lock irq_context: softirq rcu_callback &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle batched_entropy_u32.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_internal rcu_read_lock init_fs.seq.seqcount irq_context: 0 sb_internal rcu_read_lock rcu_read_lock mount_lock.seqcount irq_context: 0 sb_internal rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_internal pool_lock#2 irq_context: 0 sb_internal &journal->j_state_lock irq_context: 0 sb_internal jbd2_handle irq_context: 0 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 sb_internal jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_internal jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_internal &obj_hash[i].lock irq_context: 0 &ei->i_data_sem irq_context: 0 &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sighand->siglock hrtimer_bases.lock irq_context: 0 &sighand->siglock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &sighand->siglock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 file_rwsem irq_context: 0 file_rwsem &ctx->flc_lock irq_context: 0 file_rwsem &ctx->flc_lock &fll->lock irq_context: 0 &ctx->flc_lock irq_context: 0 &sig->cred_guard_mutex tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex 
sb_writers#3 mount_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex sb_writers#3 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &mm->mmap_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &vma->vm_lock->lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock &rq->__lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &p->alloc_lock irq_context: 0 &type->s_umount_key#31/1 irq_context: 0 &type->s_umount_key#31/1 fs_reclaim irq_context: 0 &type->s_umount_key#31/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
&type->s_umount_key#31/1 pool_lock#2 irq_context: 0 &type->s_umount_key#31/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#31/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#31/1 shrinker_mutex irq_context: 0 &type->s_umount_key#31/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#31/1 sb_lock irq_context: 0 &type->s_umount_key#31/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#31/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_lock_key#25 irq_context: 0 &type->s_umount_key#31/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#31/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_lock_key#25 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 fs_reclaim irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 pool_lock#2 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &sb->s_type->i_lock_key#25 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &sb->s_type->i_lock_key#25 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->s_umount_key/1 fs_reclaim irq_context: 0 &type->s_umount_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#32 irq_context: 0 &type->s_umount_key#32 sb_lock irq_context: 0 &type->s_umount_key#32 fs_reclaim irq_context: 0 &type->s_umount_key#32 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#32 pool_lock#2 irq_context: 0 &type->s_umount_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#32 &lru->node[i].lock 
irq_context: 0 &type->s_umount_key#32 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#32 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &p->lock &pcp->lock &zone->lock irq_context: 0 &p->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#33 irq_context: 0 &type->s_umount_key#33 sb_lock irq_context: 0 &type->s_umount_key#33 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem &c->lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem &____s->seqcount irq_context: 0 &type->s_umount_key#34 irq_context: 0 &type->s_umount_key#34 sb_lock irq_context: 0 &type->s_umount_key#34 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 sysctl_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &____s->seqcount irq_context: 0 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 &type->s_umount_key#35/1 irq_context: 0 &type->s_umount_key#35/1 fs_reclaim irq_context: 0 &type->s_umount_key#35/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#35/1 pool_lock#2 irq_context: 0 &type->s_umount_key#35/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#35/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#35/1 shrinker_mutex irq_context: 0 &type->s_umount_key#35/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#35/1 sb_lock irq_context: 0 &type->s_umount_key#35/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#35/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#35/1 &sb->s_type->i_lock_key#26 irq_context: 0 &type->s_umount_key#35/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#35/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#35/1 &sb->s_type->i_lock_key#26 &dentry->d_lock irq_context: 0 &type->s_umount_key#35/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock 
rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: softirq rcu_callback &rsp->gp_wait irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount irq_context: 0 &type->s_umount_key#36 irq_context: 0 &type->s_umount_key#36 sb_lock irq_context: 0 &type->s_umount_key#36 &dentry->d_lock irq_context: 0 redirect_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock fs_reclaim irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock pool_lock#2 irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &mm->mmap_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &port->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &tty->write_wait irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &tty->write_wait &p->pi_lock irq_context: 0 (wq_completion)pm 
(work_completion)(&dev->power.work) &dev->power.lock hrtimer_bases.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &port->lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &port_lock_key irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &port_lock_key &dev->power.lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->files_lock irq_context: 0 &tty->ldisc_sem &tty->write_wait irq_context: 0 &type->s_umount_key#37/1 irq_context: 0 &type->s_umount_key#37/1 fs_reclaim irq_context: 0 &type->s_umount_key#37/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#37/1 pool_lock#2 irq_context: 0 &type->s_umount_key#37/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#37/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#37/1 shrinker_mutex irq_context: 0 &type->s_umount_key#37/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#37/1 sb_lock irq_context: 0 &type->s_umount_key#37/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#37/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#37/1 &sb->s_type->i_lock_key#27 irq_context: 0 &type->s_umount_key#37/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#37/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#37/1 &sb->s_type->i_lock_key#27 &dentry->d_lock irq_context: 0 &type->s_umount_key#37/1 fuse_mutex irq_context: 0 &type->s_umount_key#37/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 &type->s_umount_key#38/1 irq_context: 0 &type->s_umount_key#38/1 fs_reclaim irq_context: 0 &type->s_umount_key#38/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 pool_lock#2 irq_context: 0 &type->s_umount_key#38/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#38/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#38/1 shrinker_mutex irq_context: 0 &type->s_umount_key#38/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#38/1 &c->lock irq_context: 0 &type->s_umount_key#38/1 &____s->seqcount irq_context: 0 &type->s_umount_key#38/1 sb_lock irq_context: 0 &type->s_umount_key#38/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#38/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_lock_key#28 irq_context: 0 &type->s_umount_key#38/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#38/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_lock_key#28 &dentry->d_lock irq_context: 0 &type->s_umount_key#38/1 pstore_sb_lock irq_context: 0 &type->s_umount_key#38/1 pstore_sb_lock &sb->s_type->i_mutex_key#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 fs_reclaim irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &zone->lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &____s->seqcount irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 
&psinfo->read_mutex irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex (efivars_lock).lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex fs_reclaim irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex pool_lock#2 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex (efi_runtime_lock).lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &rq->__lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &x->wait#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &rq->__lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#38/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#39/1 irq_context: 0 &type->s_umount_key#39/1 fs_reclaim irq_context: 0 &type->s_umount_key#39/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#39/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#39/1 shrinker_mutex irq_context: 0 &type->s_umount_key#39/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#39/1 sb_lock irq_context: 0 &type->s_umount_key#39/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#39/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 &sb->s_type->i_lock_key#29 irq_context: 0 &type->s_umount_key#39/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#39/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#39/1 &sb->s_type->i_lock_key#29 &dentry->d_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock (kmod_concurrent_max).lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock fs_reclaim irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#39/1 
bpf_preload_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &x->wait#16 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &rq->__lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uts_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock key irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock pcpu_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock percpu_counters_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock running_helpers_waitq.lock irq_context: 0 &type->s_umount_key#39/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#14 irq_context: 0 &type->s_umount_key#14 sb_lock irq_context: 0 &type->s_umount_key#14 fs_reclaim irq_context: 0 &type->s_umount_key#14 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#14 pool_lock#2 irq_context: 0 &type->s_umount_key#14 &dentry->d_lock irq_context: 0 &type->s_umount_key#14 &lru->node[i].lock irq_context: 0 &type->s_umount_key#14 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#14 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_lock_key irq_context: 0 &type->i_mutex_dir_key#5 irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq irq_context: 0 sb_writers#5 irq_context: 0 sb_writers#5 mount_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sbinfo->stat_lock 
irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&cb->timer) irq_context: softirq (&cb->timer) &obj_hash[i].lock irq_context: softirq (&cb->timer) &base->lock irq_context: softirq (&cb->timer) &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &base->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex kfence_freelist_lock irq_context: 0 &mm->mmap_lock sb_writers#3 mount_lock irq_context: 0 &mm->mmap_lock sb_writers#3 tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock sb_writers#3 pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &mm->mmap_lock sb_writers#3 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 batched_entropy_u32.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &wb->list_lock irq_context: 0 sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_lock irq_context: 0 &sig->cred_guard_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock 
&____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 uts_sem irq_context: 0 uts_sem hostname_poll.wait.lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &c->lock irq_context: 0 rcu_read_lock rcu_read_lock mount_lock.seqcount irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &f->f_pos_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 &f->f_pos_lock &mm->mmap_lock irq_context: 0 &fs->lock &dentry->d_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mm->page_table_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 ptlock_ptr(ptdesc)#2 irq_context: 0 
&mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock ptlock_ptr(ptdesc)#2/1 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pcpu_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xattrs->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &info->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 irq_context: 0 &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 &p->alloc_lock &x->wait#25 irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock &obj_hash[i].lock irq_context: 0 &sighand->siglock pool_lock#2 irq_context: 0 tomoyo_ss &rq->__lock irq_context: 0 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &stopper->lock irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &x->wait#8 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i]/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &bsd_socket_locks[i] irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &c->lock 
irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 &u->iolock irq_context: 0 &u->iolock rlock-AF_UNIX irq_context: 0 &ei->socket.wq.wait irq_context: 0 key#5 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_es_lock key#6 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem quarantine_lock irq_context: 0 &bsd_socket_locks[i] irq_context: 0 sb_writers tk_core.seq.seqcount irq_context: 0 sb_writers &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &wb->list_lock irq_context: 0 sb_writers &wb->list_lock &sb->s_type->i_lock_key#5 irq_context: 0 &u->lock irq_context: 0 &u->lock &u->lock/1 irq_context: 0 &u->lock rlock-AF_UNIX irq_context: 0 rcu_read_lock &ei->socket.wq.wait irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &mm->mmap_lock irq_context: 0 &u->iolock &obj_hash[i].lock irq_context: 0 &u->iolock pool_lock#2 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &wb->list_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 syslog_lock irq_context: 0 &u->lock &u->peer_wait irq_context: 0 &u->iolock &u->peer_wait irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock &rq->__lock irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &u->iolock quarantine_lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_lock_key#14 irq_context: 0 &sb->s_type->i_lock_key#14 &dentry->d_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock irq_context: 0 
&pipe->mutex/1 irq_context: 0 &pipe->rd_wait irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &dentry->d_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#5 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &u->iolock &meta->lock irq_context: 0 &u->iolock kfence_freelist_lock irq_context: 0 &mm->mmap_lock &lruvec->lru_lock irq_context: 0 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &pipe->rd_wait irq_context: 0 &pipe->mutex/1 &pipe->wr_wait irq_context: 0 &sig->cred_guard_mutex key irq_context: 0 &sig->cred_guard_mutex pcpu_lock irq_context: 0 &sig->cred_guard_mutex percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 key#9 irq_context: 0 &type->i_mutex_dir_key#5 pool_lock#2 irq_context: 0 &u->lock clock-AF_UNIX irq_context: 0 &u->peer_wait irq_context: 0 rlock-AF_UNIX irq_context: 0 &type->i_mutex_dir_key#5 &c->lock irq_context: 0 &type->i_mutex_dir_key#5 &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 fs_reclaim irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock irq_context: 0 &pipe->rd_wait &p->pi_lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#6 tk_core.seq.seqcount irq_context: 0 sb_writers#6 mount_lock irq_context: 0 &pipe->mutex/1 &rq->__lock irq_context: 0 &pipe->mutex/1 &lock->wait_lock irq_context: 0 &pipe->mutex/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lock->wait_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock irq_context: 0 
sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 sb_writers#4 sysctl_lock irq_context: 0 sb_writers#4 &dentry->d_lock irq_context: 0 sb_writers#4 tomoyo_ss irq_context: 0 sb_writers#4 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#4 tomoyo_ss &c->lock irq_context: 0 sb_writers#4 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#4 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 sb_writers#4 fs_reclaim irq_context: 0 sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 pool_lock#2 irq_context: 0 sb_writers#4 &mm->mmap_lock irq_context: 0 sb_writers#4 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &c->lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 tomoyo_ss batched_entropy_u8.lock irq_context: 0 tomoyo_ss kfence_freelist_lock irq_context: 0 tomoyo_ss &meta->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: softirq (&net->can.stattimer) irq_context: softirq (&net->can.stattimer) &obj_hash[i].lock irq_context: softirq (&net->can.stattimer) &base->lock irq_context: softirq (&net->can.stattimer) &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock pool_lock#2 irq_context: 0 nl_table_lock pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK irq_context: 0 sk_lock-AF_NETLINK slock-AF_NETLINK irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 slock-AF_NETLINK irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &bsd_socket_locks[i] irq_context: 0 &u->lock &sk->sk_peer_lock irq_context: 0 &type->i_mutex_dir_key#3 quarantine_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &ret->b_state_lock 
&journal->j_list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &mm->mmap_lock sb_writers#3 &wb->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq log_wait.lock &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 mapping.invalidate_lock &ei->i_es_lock key#6 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &xattrs->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#4 
&sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#4 oom_adj_mutex irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock irq_context: 0 sb_writers#4 oom_adj_mutex &p->alloc_lock irq_context: 0 &sb->s_type->i_lock_key#15 &dentry->d_lock irq_context: 0 &group->mark_mutex irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu irq_context: 0 &group->mark_mutex fs_reclaim irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &group->mark_mutex &____s->seqcount irq_context: 0 &group->mark_mutex &c->lock irq_context: 0 &group->mark_mutex pool_lock#2 irq_context: 0 &group->mark_mutex lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock pool_lock#2 irq_context: 0 &group->mark_mutex ucounts_lock irq_context: 0 &group->mark_mutex &mark->lock irq_context: 0 &group->mark_mutex &mark->lock &fsnotify_mark_srcu irq_context: 0 &group->mark_mutex &mark->lock &fsnotify_mark_srcu &conn->lock irq_context: 0 &group->mark_mutex &mark->lock &conn->lock irq_context: 0 &group->mark_mutex &conn->lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock pool_lock#2 irq_context: 0 &sk->sk_peer_lock irq_context: 0 &ep->mtx irq_context: 0 epnested_mutex irq_context: 0 epnested_mutex &ep->mtx irq_context: 0 epnested_mutex &ep->mtx fs_reclaim irq_context: 0 epnested_mutex &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 epnested_mutex &ep->mtx &____s->seqcount irq_context: 0 epnested_mutex &ep->mtx &c->lock irq_context: 0 epnested_mutex &ep->mtx pool_lock#2 irq_context: 0 epnested_mutex &ep->mtx &f->f_lock irq_context: 0 epnested_mutex &ep->mtx &ei->socket.wq.wait irq_context: 0 epnested_mutex &ep->mtx &ep->lock irq_context: 0 epnested_mutex rcu_read_lock &f->f_lock irq_context: 0 &ep->mtx fs_reclaim irq_context: 0 &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ep->mtx &f->f_lock irq_context: 0 &ep->mtx pool_lock#2 irq_context: 0 &ep->mtx &group->notification_waitq irq_context: 0 &ep->mtx &group->notification_lock irq_context: 0 &ep->mtx &ep->lock irq_context: 0 &ep->mtx &sighand->signalfd_wqh irq_context: 0 &ep->mtx &sighand->siglock irq_context: 0 &ep->mtx &ei->socket.wq.wait irq_context: 0 &ep->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu quarantine_lock 
irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss &n->list_lock irq_context: 0 tomoyo_ss &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 vmap_area_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &sb->s_type->i_lock_key#24 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem 
&pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss tomoyo_policy_lock &n->list_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &n->list_lock &c->lock irq_context: 0 remove_cache_srcu &c->lock irq_context: 0 remove_cache_srcu &n->list_lock irq_context: 0 remove_cache_srcu &obj_hash[i].lock irq_context: 0 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq &dev->power.lock hrtimer_bases.lock irq_context: hardirq &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: softirq rcu_callback &pcp->lock &zone->lock irq_context: softirq rcu_callback &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 sb_writers#3 &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#30 sb_writers#3 lock#4 irq_context: 0 &type->s_umount_key#30 sb_writers#3 pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 bit_wait_table + i irq_context: 0 &type->s_umount_key#30 sb_writers#3 &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 lock#4 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#30 &eli->li_list_mtx irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &base->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &retval->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle 
&meta_group_info[i]->alloc_sem rcu_read_lock &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &n->list_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &x->wait#26 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &__ctx->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_node_0 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock irq_context: 0 (wq_completion)kblockd irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &__ctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &retval->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock rcu_node_0 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &x->wait#26 irq_context: hardirq &x->wait#26 &p->pi_lock irq_context: hardirq &x->wait#26 &p->pi_lock &rq->__lock irq_context: hardirq &x->wait#26 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem (&timer.timer) irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem 
&fq->mq_flush_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &q->requeue_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &hctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &nvmeq->sq_lock irq_context: hardirq &fq->mq_flush_lock irq_context: hardirq &fq->mq_flush_lock tk_core.seq.seqcount irq_context: hardirq &fq->mq_flush_lock &x->wait#26 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &type->s_umount_key#30 sb_writers#3 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#4 kfence_freelist_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &____s->seqcount irq_context: softirq rcu_callback &zone->lock irq_context: softirq rcu_callback &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &pcp->lock &zone->lock irq_context: softirq &(&tbl->managed_work)->timer irq_context: softirq &(&krcp->monitor_work)->timer irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock 
&pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) krc.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem quarantine_lock irq_context: 0 remove_cache_srcu &rq->__lock irq_context: 0 swap_lock irq_context: 0 sb_writers#7 irq_context: 0 sb_writers#7 mount_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 iattr_mutex irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq#2 irq_context: 0 kn->active fs_reclaim irq_context: 0 kn->active fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active pool_lock#2 irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active 
&kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 sb_writers#7 fs_reclaim irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 pool_lock#2 irq_context: 0 sb_writers#7 &mm->mmap_lock irq_context: 0 sb_writers#7 &of->mutex irq_context: 0 sb_writers#7 &of->mutex kn->active &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active &obj_hash[i].lock irq_context: 0 sb_writers#7 &obj_hash[i].lock irq_context: 0 &ep->mtx &mm->mmap_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] irq_context: 0 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock irq_context: 0 &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock irq_context: 0 kn->active#2 fs_reclaim irq_context: 0 kn->active#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#2 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 rlock-AF_NETLINK irq_context: 0 
sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &obj_hash[i].lock irq_context: 0 &nlk->wait irq_context: 0 kn->active#2 &c->lock irq_context: 0 kn->active#2 &pcp->lock &zone->lock irq_context: 0 kn->active#2 &____s->seqcount irq_context: 0 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &n->list_lock &c->lock irq_context: 0 kn->active#2 &n->list_lock irq_context: 0 kn->active#2 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &c->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active &c->lock irq_context: 0 kn->active &n->list_lock irq_context: 0 kn->active &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#5 &n->list_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 kn->active &____s->seqcount irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#7 &c->lock irq_context: 0 kn->active &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex 
&____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#2 remove_cache_srcu irq_context: 0 kn->active#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &n->list_lock irq_context: 0 sb_writers#7 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 tomoyo_ss remove_cache_srcu irq_context: 0 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 remove_cache_srcu &c->lock irq_context: 0 kn->active#2 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 
sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &c->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &obj_hash[i].lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &base->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#7 remove_cache_srcu irq_context: 0 sb_writers#7 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &rq->__lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &ep->mtx &c->lock irq_context: 0 &ep->mtx &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 sb_writers#4 
&sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 &eli->li_list_mtx &obj_hash[i].lock irq_context: 0 &eli->li_list_mtx pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) irq_context: 0 ext4_li_mtx irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) krc.lock irq_context: 0 ext4_li_mtx &eli->li_list_mtx irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &obj_hash[i].lock irq_context: 0 ext4_li_mtx &obj_hash[i].lock irq_context: 0 ext4_li_mtx pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback pool_lock#2 irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &obj_hash[i].lock pool_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &c->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &n->list_lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &rq->__lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#2 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex quarantine_lock irq_context: 0 sb_writers#7 
&type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 sb_writers#7 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#7 remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#7 remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#7 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem batched_entropy_u8.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 kn->active#2 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: softirq (&vblank->disable_timer) irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 batched_entropy_u8.lock irq_context: 0 sb_writers#7 kfence_freelist_lock irq_context: 0 sb_writers#7 &meta->lock irq_context: softirq (&q->timeout) irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq 
(&q->timeout) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&q->timeout_work) irq_context: 0 kn->active remove_cache_srcu irq_context: 0 kn->active remove_cache_srcu quarantine_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 kn->active#2 &rq->__lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#3 fs_reclaim irq_context: 0 kn->active#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &____s->seqcount irq_context: 0 kn->active#3 &c->lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &c->lock irq_context: 0 kn->active#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &rq->__lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#3 quarantine_lock irq_context: 0 kn->active#3 &n->list_lock irq_context: 0 kn->active#3 &n->list_lock &c->lock irq_context: 0 kn->active#3 remove_cache_srcu irq_context: 0 kn->active#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &pcp->lock &zone->lock irq_context: 0 kn->active#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &meta->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#3 remove_cache_srcu &c->lock irq_context: 0 kn->active#3 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 &mm->mmap_lock &n->list_lock irq_context: 0 &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &wb->list_lock irq_context: 0 sb_writers#7 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem 
remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sem->wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 &sem->wait_lock irq_context: 0 &sem->wait_lock irq_context: 0 sb_writers#5 &sem->wait_lock irq_context: 0 sb_writers#5 &p->pi_lock irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 kn->active#4 fs_reclaim irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock 
&ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#4 &n->list_lock irq_context: 0 kn->active#4 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &device->physical_node_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 udc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fw_lock irq_context: 0 kn->active#4 remove_cache_srcu irq_context: 0 kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &pcp->lock &zone->lock irq_context: 0 kn->active#4 remove_cache_srcu &c->lock irq_context: 0 kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 
remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#4 &pcp->lock &zone->lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rfkill->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock irq_context: 0 sb_writers#7 &rq->__lock irq_context: 0 sb_writers#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kn->active#4 &rq->__lock irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &n->list_lock irq_context: 0 &sig->cred_guard_mutex &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu irq_context: 0 &mm->mmap_lock remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock &n->list_lock irq_context: 0 &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 &u->bindlock irq_context: 0 &u->bindlock fs_reclaim irq_context: 0 &u->bindlock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &u->bindlock pool_lock#2 irq_context: 0 &u->bindlock batched_entropy_u32.lock irq_context: 0 &u->bindlock &net->unx.table.locks[i] irq_context: 0 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 &u->bindlock &net->unx.table.locks[i]/1 irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock irq_context: 0 &u->lock &u->lock/1 &dentry->d_lock irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock &sk->sk_peer_lock/1 irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock/1 irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &ei->socket.wq.wait irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &dentry->d_lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &group->notification_waitq irq_context: 0 &group->notification_lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 &ep->mtx &mm->mmap_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &rq->__lock irq_context: 0 tomoyo_ss batched_entropy_u8.lock crngs.lock irq_context: 0 tomoyo_ss batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 &u->iolock &base->lock 
irq_context: 0 &u->iolock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ovs_net->masks_rebalance)->timer irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) ovs_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &base->lock &obj_hash[i].lock irq_context: 0 kn->active#5 fs_reclaim irq_context: 0 kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &of->mutex irq_context: 0 &p->lock &of->mutex kn->active#5 param_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &dentry->d_lock irq_context: 0 sb_writers#7 tomoyo_ss irq_context: 0 sb_writers#7 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#7 kn->active#5 fs_reclaim irq_context: 0 sb_writers#7 kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 tomoyo_ss &c->lock irq_context: 0 sb_writers#7 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#7 iattr_mutex irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex irq_context: 
0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &____s->seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &c->lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex pool_lock#2 irq_context: 0 &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &sem->wait_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#5 param_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#5 param_lock disk_events_mutex irq_context: 0 batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 remove_cache_srcu &base->lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#4 pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#4 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &p->lock &of->mutex kn->active#4 quarantine_lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &base->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &pcp->lock &zone->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &____s->seqcount irq_context: 0 &p->lock remove_cache_srcu irq_context: 0 &p->lock remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock irq_context: 0 &p->lock &n->list_lock irq_context: 0 &p->lock &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &n->list_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &n->list_lock &c->lock irq_context: softirq (&cb->timer) tk_core.seq.seqcount irq_context: 0 &p->lock &of->mutex kn->active#4 &device->physical_node_lock irq_context: 0 mapping.invalidate_lock 
&folio_wait_table[i] &p->pi_lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock 
&dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#6 fs_reclaim irq_context: 0 kn->active#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &of->mutex irq_context: 0 kn->active#7 fs_reclaim irq_context: 0 kn->active#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#8 fs_reclaim irq_context: 0 kn->active#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#9 fs_reclaim irq_context: 0 kn->active#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &p->lock remove_cache_srcu &c->lock irq_context: 0 &p->lock remove_cache_srcu &n->list_lock irq_context: 0 &p->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &p->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &p->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock crngs.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#10 fs_reclaim irq_context: 0 kn->active#10 fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#11 fs_reclaim irq_context: 0 kn->active#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#12 fs_reclaim irq_context: 0 kn->active#12 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock rcu_node_0 irq_context: 0 kn->active#13 fs_reclaim irq_context: 0 kn->active#13 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#14 fs_reclaim irq_context: 0 kn->active#14 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#15 fs_reclaim irq_context: 0 kn->active#15 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#16 fs_reclaim irq_context: 0 kn->active#16 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock rcu_read_lock rcu_node_0 irq_context: 0 &p->lock rcu_read_lock &rq->__lock irq_context: 0 &p->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss 
&n->list_lock irq_context: 0 kn->active#17 fs_reclaim irq_context: 0 kn->active#17 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#17 &c->lock irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#18 fs_reclaim irq_context: 0 kn->active#18 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#15 &c->lock irq_context: 0 kn->active#19 fs_reclaim irq_context: 0 kn->active#19 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &c->lock irq_context: 0 kn->active#20 fs_reclaim irq_context: 0 kn->active#20 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#20 dev_base_lock irq_context: 0 kn->active#21 fs_reclaim irq_context: 0 kn->active#21 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#21 dev_base_lock irq_context: 0 kn->active#22 fs_reclaim irq_context: 0 kn->active#22 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#2 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#2 &n->list_lock &c->lock irq_context: 0 kn->active#23 fs_reclaim irq_context: 0 kn->active#23 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#23 dev_base_lock irq_context: 0 kn->active#24 fs_reclaim irq_context: 0 kn->active#24 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#24 dev_base_lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &n->list_lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sem->wait_lock irq_context: 0 kn->active#25 fs_reclaim irq_context: 0 kn->active#25 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#26 fs_reclaim irq_context: 0 kn->active#26 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#26 &c->lock irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#27 fs_reclaim irq_context: 0 kn->active#27 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#28 fs_reclaim irq_context: 0 kn->active#28 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 rcu_read_lock &____s->seqcount#6 irq_context: 0 &sb->s_type->i_mutex_key#15 irq_context: 0 mapping.invalidate_lock#2 irq_context: 0 mapping.invalidate_lock#2 mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock#2 &____s->seqcount irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 pool_lock#2 irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 &c->lock irq_context: 0 kn->active#29 fs_reclaim irq_context: 0 mapping.invalidate_lock#2 lock#4 irq_context: 0 kn->active#29 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock#2 pool_lock#2 irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] irq_context: 0 mapping.invalidate_lock#2 tk_core.seq.seqcount irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &p->lock &of->mutex 
kn->active#29 fs_reclaim irq_context: 0 &p->lock &of->mutex kn->active#29 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#29 pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#29 &obj_hash[i].lock irq_context: 0 kn->active#19 &c->lock irq_context: 0 kn->active#19 &____s->seqcount irq_context: 0 mapping.invalidate_lock#2 &c->lock irq_context: 0 mapping.invalidate_lock#2 lock#4 &lruvec->lru_lock irq_context: 0 &p->lock &of->mutex kn->active#4 fw_lock irq_context: 0 kn->active#30 fs_reclaim irq_context: 0 kn->active#30 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#30 &c->lock irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#30 dev_base_lock irq_context: 0 kn->active#31 fs_reclaim irq_context: 0 kn->active#31 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#31 &c->lock irq_context: 0 kn->active#31 &n->list_lock irq_context: 0 kn->active#31 &n->list_lock &c->lock irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &of->mutex kn->active#31 &dev->power.lock irq_context: 0 &of->mutex kn->active#31 pci_lock irq_context: 0 kn->active#32 fs_reclaim irq_context: 0 kn->active#32 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#33 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 mapping.invalidate_lock#2 &pcp->lock &zone->lock irq_context: 0 &evdev->client_lock irq_context: 0 &evdev->mutex irq_context: 0 &evdev->mutex &dev->mutex#2 irq_context: 0 kn->active#33 fs_reclaim irq_context: 0 kn->active#33 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#33 &c->lock irq_context: 0 kn->active#33 &n->list_lock irq_context: 0 kn->active#33 &n->list_lock &c->lock irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 mapping.invalidate_lock#2 &n->list_lock irq_context: 0 mapping.invalidate_lock#2 &n->list_lock 
&c->lock irq_context: 0 kn->active#16 &c->lock irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#34 fs_reclaim irq_context: 0 kn->active#34 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 slock-AF_INET/1 irq_context: 0 rtnl_mutex devnet_rename_sem irq_context: 0 rtnl_mutex devnet_rename_sem (console_sem).lock irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &k->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem kernfs_rename_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem &sem->wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem kernfs_rename_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &____s->seqcount irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 
rtnl_mutex devnet_rename_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex devnet_rename_sem &obj_hash[i].lock irq_context: 0 rtnl_mutex &x->wait#2 irq_context: 0 &evdev->mutex &dev->mutex#2 &obj_hash[i].lock irq_context: 0 &evdev->mutex &dev->mutex#2 &x->wait#2 irq_context: 0 &evdev->mutex &dev->mutex#2 &rq->__lock irq_context: 0 &evdev->mutex &dev->mutex#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &evdev->mutex &dev->mutex#2 rcu_read_lock &rq->__lock irq_context: 0 &evdev->mutex &dev->mutex#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &rq->__lock irq_context: 0 &p->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &sem->wait_lock irq_context: 0 sb_writers &p->pi_lock irq_context: 0 sb_writers &p->pi_lock &rq->__lock irq_context: 0 sb_writers &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sem->wait_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 kn->active#4 &c->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss quarantine_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss 
remove_cache_srcu &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#6 &c->lock irq_context: 0 kn->active#8 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &rq->__lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &retval->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &c->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &n->list_lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 kn->active#18 &c->lock irq_context: 0 kn->active#12 &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex sysctl_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex sysctl_lock pool_lock#2 irq_context: 0 rtnl_mutex sysctl_lock krc.lock irq_context: 0 rtnl_mutex &nft_net->commit_mutex irq_context: 0 rtnl_mutex proc_subdir_lock irq_context: 0 rtnl_mutex &ent->pde_unload_lock irq_context: 0 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 rtnl_mutex proc_subdir_lock irq_context: 0 rtnl_mutex &n->list_lock irq_context: 0 rtnl_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex target_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_INET irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#7 &c->lock irq_context: 0 kn->active#9 &c->lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#11 &c->lock irq_context: 0 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock rcu_node_0 irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &rq->__lock irq_context: 0 kn->active#10 &c->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &rcu_state.gp_wq irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock 
&rcu_state.gp_wq &p->pi_lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#10 remove_cache_srcu irq_context: 0 kn->active#10 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 
&dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &lruvec->lru_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &info->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xa->xa_lock#9 irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss quarantine_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#25 &c->lock irq_context: 0 kn->active#25 &n->list_lock irq_context: 0 kn->active#25 &n->list_lock &c->lock irq_context: 0 kn->active#35 fs_reclaim irq_context: 0 kn->active#35 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#8 &n->list_lock irq_context: 0 kn->active#8 &n->list_lock &c->lock irq_context: 0 kn->active#11 remove_cache_srcu irq_context: 0 kn->active#11 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#11 remove_cache_srcu &c->lock irq_context: 0 kn->active#11 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#11 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#11 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#11 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#9 &n->list_lock irq_context: 0 kn->active#9 &n->list_lock &c->lock irq_context: 0 kn->active#12 &n->list_lock irq_context: 0 kn->active#12 &n->list_lock &c->lock irq_context: 0 kn->active#36 fs_reclaim irq_context: 0 kn->active#36 fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#37 fs_reclaim irq_context: 0 kn->active#37 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#36 &c->lock irq_context: 0 kn->active#36 &n->list_lock irq_context: 0 kn->active#36 &n->list_lock &c->lock irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &rq->__lock irq_context: 0 &ep->mtx &pipe->rd_wait irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 
0 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock quarantine_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 videodev_lock irq_context: 0 &dev_instance->mutex irq_context: 0 &dev_instance->mutex fs_reclaim irq_context: 0 &dev_instance->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev_instance->mutex pool_lock#2 irq_context: 0 &dev_instance->mutex vicodec_core:1851:(hdl)->_lock irq_context: 0 &dev_instance->mutex &c->lock irq_context: 0 &dev_instance->mutex &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev_instance->mutex &vdev->fh_lock irq_context: 0 &mm->mmap_lock &sem->wait_lock irq_context: 0 &mm->mmap_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx key#10 irq_context: 0 &mm->mmap_lock 
&mapping->i_mmap_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 &dev_instance->mutex &n->list_lock irq_context: 0 &dev_instance->mutex &n->list_lock &c->lock irq_context: 0 &mdev->req_queue_mutex irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &m2m_dev->job_spinlock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->mmap_lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex &mdev->graph_mutex irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex pool_lock#2 irq_context: 0 &pipe->rd_wait &ep->lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock irq_context: 0 fh->state->lock irq_context: 0 &vdev->fh_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock irq_context: 0 
&pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->dev_mutex irq_context: 0 &dev->dev_mutex fs_reclaim irq_context: 0 &dev->dev_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->dev_mutex pool_lock#2 irq_context: 0 &dev->dev_mutex vim2m:1183:(hdl)->_lock irq_context: 0 &dev->dev_mutex &obj_hash[i].lock irq_context: 0 &dev->dev_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->dev_mutex &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &m2m_dev->job_spinlock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &q->mmap_lock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex pool_lock#2 irq_context: 0 &ep->mtx rcu_read_lock &pipe->rd_wait irq_context: 0 &ep->mtx &obj_hash[i].lock irq_context: 0 &sighand->signalfd_wqh irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#38 fs_reclaim irq_context: 0 kn->active#38 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#38 &c->lock irq_context: 0 kn->active#38 &pcp->lock &zone->lock irq_context: 0 kn->active#38 &____s->seqcount irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu irq_context: 0 sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq 
&p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vcapture->lock irq_context: 0 &vcapture->lock &q->done_wq irq_context: 0 &vcapture->lock &q->mmap_lock irq_context: 0 &mdev->graph_mutex irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 &u->iolock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock kfence_freelist_lock irq_context: 0 &dev->mutex#3 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &q->mmap_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &meta->lock irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock key irq_context: 0 &mm->mmap_lock pcpu_lock irq_context: 0 &mm->mmap_lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pool_lock irq_context: 0 rcu_read_lock key irq_context: 0 rcu_read_lock pcpu_lock irq_context: 0 rcu_read_lock percpu_counters_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock 
irq_context: 0 &p->lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#4 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 &p->lock remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &n->list_lock &c->lock irq_context: 0 &lo->lo_mutex irq_context: 0 &disk->open_mutex &lo->lo_mutex irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &disk->open_mutex nbd_index_mutex irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &bdev->bd_size_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &q->queue_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(sdp, lock) irq_context: 0 &disk->open_mutex &nbd->config_lock set->srcu irq_context: 0 &disk->open_mutex &nbd->config_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &x->wait#3 irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 &disk->open_mutex &nbd->config_lock set->srcu irq_context: 0 &disk->open_mutex &nbd->config_lock pool_lock#2 irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &c->lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &____s->seqcount irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_read_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rfkill->lock irq_context: 0 kn->active#20 &c->lock irq_context: 0 kn->active#20 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#21 &c->lock irq_context: 0 kn->active#21 &pcp->lock &zone->lock irq_context: 0 kn->active#21 &____s->seqcount irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 &disk->open_mutex &new->lock irq_context: 0 &disk->open_mutex &new->lock &mtdblk->cache_mutex irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#20 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#39 fs_reclaim irq_context: 0 kn->active#39 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#39 &c->lock irq_context: 0 kn->active#39 &pcp->lock &zone->lock irq_context: 0 kn->active#39 &____s->seqcount irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mtd->master.chrdev_lock irq_context: 0 &mtd->master.chrdev_lock &rq->__lock irq_context: 0 &mtd->master.chrdev_lock &mm->mmap_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 percpu_counters_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &c->lock irq_context: 0 
kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &n->list_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &ep->mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &base->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &p->pi_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 kn->active#4 batched_entropy_u8.lock irq_context: 0 kn->active#4 kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock 
irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 quarantine_lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&wb->dwork)->timer irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &p->sequence irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 kfence_freelist_lock irq_context: 0 tomoyo_ss rcu_node_0 irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &wq irq_context: 0 
&type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock pool_lock#2 irq_context: 0 &dentry->d_lock &lru->node[i].lock irq_context: 0 kn->active#40 fs_reclaim irq_context: 0 kn->active#40 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_waitq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_waitq &p->pi_lock irq_context: 0 sb_writers#5 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &info->lock irq_context: 0 sb_writers#5 &obj_hash[i].lock irq_context: 0 sb_writers#5 pool_lock#2 irq_context: 0 sb_writers#5 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &xa->xa_lock#9 irq_context: 0 sb_writers#5 &fsnotify_mark_srcu irq_context: 0 &mark->lock irq_context: 0 &group->inotify_data.idr_lock irq_context: 0 &group->inotify_data.idr_lock &obj_hash[i].lock irq_context: 0 &group->inotify_data.idr_lock pool_lock#2 irq_context: 0 &conn->lock irq_context: 0 destroy_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work irq_context: 0 (wq_completion)events_unbound connector_reaper_work destroy_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)events_unbound connector_reaper_work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events_unbound connector_reaper_work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work &x->wait#3 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs/notify/mark.c:89 irq_context: 0 (wq_completion)events_unbound connector_reaper_work pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work irq_context: 0 (wq_completion)events_unbound (reaper_work).work destroy_lock irq_context: 0 (reaper_work).work irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)events_unbound (reaper_work).work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events_unbound (reaper_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &x->wait#3 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work pool_lock#2 irq_context: 0 &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 (&journal->j_commit_timer) irq_context: 0 &journal->j_checkpoint_mutex irq_context: 0 &journal->j_checkpoint_mutex mmu_notifier_invalidate_range_start irq_context: 0 &journal->j_checkpoint_mutex pool_lock#2 irq_context: 0 &journal->j_checkpoint_mutex tk_core.seq.seqcount irq_context: 0 &journal->j_checkpoint_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 &journal->j_checkpoint_mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 &journal->j_checkpoint_mutex bit_wait_table + i irq_context: 0 &journal->j_checkpoint_mutex &rq->__lock irq_context: 0 &journal->j_checkpoint_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &journal->j_checkpoint_mutex &journal->j_state_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_updates irq_context: 0 &journal->j_list_lock irq_context: 0 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &ei->i_es_lock irq_context: 0 &mapping->i_private_lock irq_context: 0 &ret->b_state_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock irq_context: 0 &ei->i_es_lock key#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock irq_context: 0 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &journal->j_state_lock &journal->j_list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 rcu_read_lock &retval->lock irq_context: 0 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &__ctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &__ctx->lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &memcg->move_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 irq_context: 0 &ret->b_state_lock &journal->j_list_lock &sb->s_type->i_lock_key#3 irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->list_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 &journal->j_state_lock &journal->j_list_lock irq_context: 0 &sbi->s_md_lock irq_context: 0 &journal->j_fc_wait irq_context: 0 &journal->j_history_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 kfence_freelist_lock 
irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &meta->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu quarantine_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fib_info_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &dir->lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rhashtable_bucket irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events 
(work_completion)(&ht->run_work) &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u32.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex rhashtable_bucket irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex rhashtable_bucket rhashtable_bucket/1 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_lock irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_NETLINK irq_context: 0 &sb->s_type->i_mutex_key#10 &nlk->wait irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem hwsim_radio_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex _xmit_LOOPBACK irq_context: 0 rtnl_mutex netpoll_srcu irq_context: 0 rtnl_mutex quarantine_lock irq_context: 0 rtnl_mutex remove_cache_srcu irq_context: 0 rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex &im->lock irq_context: 0 rtnl_mutex fib_info_lock irq_context: 0 rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex cbs_list_lock irq_context: 0 rtnl_mutex &ndev->lock irq_context: 0 rtnl_mutex &idev->mc_lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem irq_context: 0 rtnl_mutex rcu_read_lock &net->ipv6.addrconf_hash_lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 rtnl_mutex &ifa->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &tb->tb6_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &ndev->lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &rq->__lock irq_context: softirq rcu_callback &dir->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: softirq rcu_callback &dir->lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 
&sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &meta->lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &n->list_lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_INET6 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 sb_writers#5 tomoyo_ss irq_context: 0 sb_writers#5 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &xattrs->lock irq_context: 0 &u->lock/1 irq_context: 0 userns_state_mutex irq_context: 0 mapping.invalidate_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mapping->i_mmap_rwsem irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#5 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &lruvec->lru_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock sb_writers#5 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &____s->seqcount irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &info->lock irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 irq_context: 0 &sb->s_type->i_lock_key#4 irq_context: 0 &sb->s_type->i_lock_key#4 &dentry->d_lock irq_context: 0 sk_lock-AF_UNIX irq_context: 0 sk_lock-AF_UNIX slock-AF_UNIX irq_context: 0 slock-AF_UNIX irq_context: 0 cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem key irq_context: 0 cgroup_threadgroup_rwsem pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 sk_lock-AF_INET irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 irq_context: 0 slock-AF_INET#2 irq_context: 0 sk_lock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 slock-AF_INET6 irq_context: 0 slock-AF_INET6 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock 
&c->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss &____s->seqcount irq_context: 0 vmap_purge_lock &rq->__lock irq_context: 0 vmap_purge_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 vmap_purge_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET &table->hash[i].lock irq_context: 0 sk_lock-AF_INET &table->hash[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock clock-AF_INET6 irq_context: 0 vmap_purge_lock vmap_purge_lock.wait_lock irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 vmap_purge_lock.wait_lock irq_context: 0 sk_lock-AF_NETLINK &mm->mmap_lock irq_context: 0 sk_lock-AF_NETLINK fs_reclaim irq_context: 0 sk_lock-AF_NETLINK fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NETLINK pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK &____s->seqcount irq_context: 0 sk_lock-AF_NETLINK &c->lock irq_context: 0 sk_lock-AF_NETLINK pcpu_alloc_mutex irq_context: 0 sk_lock-AF_NETLINK pcpu_alloc_mutex pcpu_lock irq_context: 0 sk_lock-AF_NETLINK &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK batched_entropy_u32.lock irq_context: 0 sk_lock-AF_NETLINK batched_entropy_u32.lock crngs.lock irq_context: 0 sk_lock-AF_NETLINK vmap_purge_lock irq_context: 0 sk_lock-AF_NETLINK vmap_purge_lock purge_vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK &fp->aux->used_maps_mutex irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#41 fs_reclaim irq_context: 0 kn->active#41 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#41 &c->lock irq_context: 0 kn->active#41 &pcp->lock &zone->lock irq_context: 0 kn->active#41 &____s->seqcount irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock 
&pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock &ifa->lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq#2 irq_context: 0 vlan_ioctl_mutex &mm->mmap_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex irq_context: 0 cb_lock irq_context: 0 cb_lock genl_mutex irq_context: 0 cb_lock genl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &c->lock irq_context: 0 cb_lock genl_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex &obj_hash[i].lock irq_context: 0 cb_lock fs_reclaim irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock &c->lock irq_context: 0 cb_lock pool_lock#2 irq_context: 0 cb_lock rlock-AF_NETLINK irq_context: 0 cb_lock rtnl_mutex irq_context: 0 cb_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &____s->seqcount irq_context: 0 cb_lock &____s->seqcount irq_context: 0 dev_addr_sem irq_context: 0 cb_lock &n->list_lock irq_context: 0 cb_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock &rdev->wiphy.mtx irq_context: 0 cb_lock &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock &rdev->wiphy.mtx &____s->seqcount irq_context: 0 cb_lock &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock &rdev->wiphy.mtx rlock-AF_NETLINK irq_context: 0 cb_lock nlk_cb_mutex-GENERIC irq_context: 0 cb_lock nlk_cb_mutex-GENERIC fs_reclaim irq_context: 0 cb_lock nlk_cb_mutex-GENERIC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &____s->seqcount irq_context: 0 cb_lock nlk_cb_mutex-GENERIC pool_lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &pcp->lock &zone->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rtnl_mutex irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rlock-AF_NETLINK irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &obj_hash[i].lock irq_context: 0 cb_lock &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#4 tomoyo_ss &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 
rcu_read_lock &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 lock#4 &lruvec->lru_lock irq_context: 0 &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 fs_reclaim irq_context: 0 sb_writers#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 lock#4 irq_context: 0 sb_writers#5 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 lock#5 irq_context: 0 sb_writers#5 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem quarantine_lock irq_context: 0 tasklist_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 tasklist_lock &sighand->siglock kfence_freelist_lock irq_context: 0 &sighand->siglock &meta->lock irq_context: 0 &sighand->siglock kfence_freelist_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 vmap_area_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &pipe->mutex/1 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 &____s->seqcount irq_context: softirq (&net->sctp.addr_wq_timer) irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock remove_cache_srcu irq_context: 0 &vma->vm_lock->lock remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &c->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &n->list_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock 
remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 batched_entropy_u8.lock irq_context: 0 sb_writers#3 kfence_freelist_lock irq_context: 0 sb_writers#3 &meta->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock fs_reclaim irq_context: 0 &pipe->mutex/1 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock quarantine_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key irq_context: 0 rtnl_mutex &dev_addr_list_lock_key &c->lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key &____s->seqcount irq_context: 0 rtnl_mutex &dev_addr_list_lock_key pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &pnettable->lock irq_context: 0 rtnl_mutex smc_ib_devices.mutex irq_context: 0 rtnl_mutex napi_hash_lock irq_context: 0 rtnl_mutex lapb_list_lock irq_context: 0 tasklist_lock &sighand->siglock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_area_lock irq_context: 0 rtnl_mutex &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock 
free_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pcpu_lock irq_context: 0 rtnl_mutex x25_neigh_list_lock irq_context: 0 rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex console_lock console_srcu console_owner irq_context: 0 rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &u->lock &ei->socket.wq.wait irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER irq_context: 0 rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 rtnl_mutex _xmit_SLIP irq_context: 0 rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: hardirq log_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&eql->timer) irq_context: softirq (&eql->timer) &eql->queue.lock irq_context: softirq (&eql->timer) &obj_hash[i].lock irq_context: softirq (&eql->timer) &base->lock irq_context: softirq (&eql->timer) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex free_vmap_area_lock irq_context: 0 rtnl_mutex vmap_area_lock irq_context: 0 rtnl_mutex init_mm.page_table_lock irq_context: 0 rtnl_mutex &cma->lock irq_context: 0 rtnl_mutex cma_mutex irq_context: 0 rtnl_mutex cma_mutex &zone->lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex cma_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 irq_context: 0 rtnl_mutex cma_mutex lock#2 &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex lock#2 &rq->__lock irq_context: 0 rtnl_mutex cma_mutex lock#2 (work_completion)(work) irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock 
(wq_completion)mm_percpu_wq irq_context: 0 rtnl_mutex cma_mutex lock#2 &x->wait#10 irq_context: 0 rtnl_mutex cma_mutex lock#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex fs_reclaim irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex jump_label_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex jump_label_mutex patch_lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) (console_sem).lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex _xmit_ETHER &c->lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) lweventlist_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &obj_hash[i].lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &rq->__lock irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rfkill->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &data->mutex irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock irq_context: 0 
rtnl_mutex _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 rtnl_mutex _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->filter_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex cbs_list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &____s->seqcount irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock noop_qdisc.q.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &sch->q.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex class irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex (&tbl->proxy_timer) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &c->lock irq_context: 0 rtnl_mutex _xmit_VOID irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock irq_context: 0 &u->iolock &u->lock irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &u->iolock &mm->mmap_lock fs_reclaim irq_context: 0 &u->iolock &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &u->iolock &mm->mmap_lock &____s->seqcount irq_context: 0 &u->iolock &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex _xmit_X25 irq_context: 0 rtnl_mutex lapb_list_lock irq_context: 0 rtnl_mutex lapb_list_lock pool_lock#2 irq_context: 0 rtnl_mutex lapb_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex lapb_list_lock &base->lock irq_context: 0 rtnl_mutex lapb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapbeth->up_lock irq_context: 0 rtnl_mutex &lapb->lock irq_context: 0 rtnl_mutex &lapb->lock pool_lock#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh pool_lock#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh &____s->seqcount irq_context: 0 rtnl_mutex &lapb->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock &base->lock irq_context: 0 rtnl_mutex &lapb->lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET6 batched_entropy_u32.lock irq_context: 0 sk_lock-AF_INET6 batched_entropy_u32.lock crngs.lock irq_context: 0 sk_lock-AF_INET6 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &ul->lock irq_context: 0 sk_lock-AF_INET6 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 pool_lock#2 irq_context: 0 sk_lock-AF_INET6 batched_entropy_u16.lock irq_context: 0 sk_lock-AF_INET6 batched_entropy_u16.lock crngs.lock irq_context: 0 
&sb->s_type->i_mutex_key#10 &table->hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &table->hash[i].lock &table->hash2[i].lock irq_context: hardirq &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_callback &ul->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &h->lhash2[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock clock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock clock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 &tty->legacy_mutex &tty->ldisc_sem irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &c->lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock crngs.lock irq_context: 0 &tty->legacy_mutex &f->f_lock irq_context: 0 &tty->legacy_mutex &f->f_lock fasync_lock irq_context: 0 &tty->legacy_mutex &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex pool_lock#2 irq_context: 0 &tty->legacy_mutex tasklist_lock irq_context: 0 &tty->legacy_mutex tasklist_lock &sighand->siglock irq_context: 0 &tty->legacy_mutex tasklist_lock &sighand->siglock &tty->ctrl.lock irq_context: 0 rcu_read_lock &tty->ctrl.lock irq_context: 0 &tty->ctrl.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 &port_lock_key irq_context: 0 &buf->lock irq_context: 0 &tty->ldisc_sem rcu_read_lock &tty->ctrl.lock irq_context: 0 &tty->ldisc_sem &port_lock_key irq_context: 0 &tty->ldisc_sem &port->lock irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->flow.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->xattr_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &mm->mmap_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 
&ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &memcg->move_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &xa->xa_lock#9 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &wb->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock_bh &base->lock irq_context: 0 rtnl_mutex rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &c->lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock irq_context: softirq rcu_callback rcu_read_lock rt6_exception_lock irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &tty->termios_rwsem &tty->read_wait irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock (work_completion)(&buf->work) irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &rq->__lock irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &lapb->lock &c->lock irq_context: softirq &tx->clean_lock irq_context: softirq &tx->clean_lock &obj_hash[i].lock irq_context: softirq 
&tx->clean_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex lapb_list_lock &c->lock irq_context: 0 &net->packet.sklist_lock irq_context: 0 sk_lock-AF_PACKET irq_context: 0 sk_lock-AF_PACKET slock-AF_PACKET irq_context: 0 sk_lock-AF_PACKET &po->bind_lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock ptype_lock irq_context: 0 sk_lock-AF_PACKET &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &x->wait#2 irq_context: 0 sk_lock-AF_PACKET &rq->__lock irq_context: 0 sk_lock-AF_PACKET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex class irq_context: 0 sk_lock-AF_PACKET &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (&tbl->proxy_timer) irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock ptype_lock irq_context: 0 slock-AF_PACKET irq_context: 0 sk_lock-AF_PACKET &mm->mmap_lock irq_context: 0 sk_lock-AF_PACKET fs_reclaim irq_context: 0 sk_lock-AF_PACKET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_PACKET pool_lock#2 irq_context: 0 sk_lock-AF_PACKET free_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET 
vmap_area_lock irq_context: 0 sk_lock-AF_PACKET &____s->seqcount irq_context: 0 sk_lock-AF_PACKET pcpu_alloc_mutex irq_context: 0 sk_lock-AF_PACKET pcpu_alloc_mutex pcpu_lock irq_context: 0 sk_lock-AF_PACKET batched_entropy_u32.lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock purge_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &fp->aux->used_maps_mutex irq_context: 0 rlock-AF_PACKET irq_context: 0 wlock-AF_PACKET irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_ifc_work)->timer irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_callback &ul->lock#2 irq_context: 0 rtnl_mutex &net->ipv6.addrconf_hash_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock krc.lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER 
rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &dir->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock krc.lock irq_context: 0 rtnl_mutex &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &dir->lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 fs_reclaim irq_context: 0 sk_lock-AF_INET6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET6 &mm->mmap_lock irq_context: 0 sk_lock-AF_INET6 
once_lock irq_context: 0 sk_lock-AF_INET6 once_lock crngs.lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET6 &rq->__lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &tbl->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &n->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount#8 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: softirq &(&idev->mc_dad_work)->timer irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq net/core/link_watch.c:31 irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex 
&rnp->exp_wq[3] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock krc.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 sk_lock-AF_PACKET &c->lock irq_context: 0 &pipe->rd_wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock krc.lock irq_context: 0 sk_lock-AF_PACKET pool_lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock ptype_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock &dir->lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET 
slock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 fanout_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 &x->wait#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 pcpu_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: softirq &(&ifa->dad_work)->timer irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ul->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &tx->clean_lock rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rlock-AF_PACKET irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait irq_context: 
softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock once_lock irq_context: softirq rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq &tx->clean_lock &meta->lock irq_context: softirq &tx->clean_lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) 
&idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock kfence_freelist_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx rcu_read_lock &sighand->signalfd_wqh irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx rcu_read_lock &ei->socket.wq.wait irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &____s->seqcount irq_context: 0 &vma->vm_lock->lock key irq_context: 0 &vma->vm_lock->lock pcpu_lock irq_context: 0 &vma->vm_lock->lock percpu_counters_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/base/dd.c:321 
irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock &k->list_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock &k->k_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work deferred_probe_mutex irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work deferred_probe_work irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work &x->wait#10 irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work &rq->__lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &type->s_umount_key#40 &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 
irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 batched_entropy_u8.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 kfence_freelist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#4 irq_context: 0 (wq_completion)writeback 
(work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &journal->j_state_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex 
pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &mapping->i_private_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ret->b_state_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &journal->j_revoke_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &pa->pa_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &lg->lg_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) 
&type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_raw_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &nvmeq->sq_lock irq_context: hardirq &ei->i_completed_io_lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq rcu_read_lock &memcg->move_lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 (wq_completion)ext4-rsv-conversion irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ei->i_completed_io_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &journal->j_state_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &journal->j_state_lock &journal->j_wait_reserved irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_es_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle 
&ei->i_data_sem irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_raw_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ext4__ioend_wq[i] irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ret->b_uptodate_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: softirq rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock once_lock irq_context: softirq rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&wb->bw_dwork)->timer irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->bw_dwork)->work) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->bw_dwork)->work) 
&wb->list_lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &base->lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock key#13 irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: softirq (&dev->watchdog_timer) irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &obj_hash[i].lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &base->lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 &c->lock irq_context: 0 sk_lock-AF_INET6 &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) irq_context: softirq (&lapb->t1timer) &lapb->lock irq_context: softirq (&lapb->t1timer) &lapb->lock pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock &c->lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock &base->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &base->lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock batched_entropy_u8.lock irq_context: softirq (&lapb->t1timer) &lapb->lock kfence_freelist_lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock 
irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh &meta->lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 sk_lock-AF_PACKET &n->list_lock irq_context: 0 sk_lock-AF_PACKET &n->list_lock &c->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: softirq (&dom->period_timer) irq_context: softirq (&dom->period_timer) key#14 irq_context: softirq (&dom->period_timer) &p->sequence irq_context: softirq (&dom->period_timer) &obj_hash[i].lock irq_context: softirq (&dom->period_timer) &base->lock irq_context: softirq (&dom->period_timer) &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_PACKET batched_entropy_u32.lock crngs.lock irq_context: 0 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rlock-AF_NETLINK irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &dir->lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 tasklist_lock &sighand->siglock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#5 &dentry->d_lock irq_context: 0 sb_writers#5 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 tomoyo_ss &n->list_lock &c->lock irq_context: 0 hostname_poll.wait.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &pl->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &pl->lock key#12 irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] 
&nf_conntrack_locks[i]/1 irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock once_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u32.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &base->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &base->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &n->lock irq_context: softirq rcu_read_lock &n->lock &obj_hash[i].lock irq_context: 
softirq rcu_read_lock &n->lock &base->lock irq_context: softirq rcu_read_lock &n->lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &n->lock &(&n->ha_lock)->lock irq_context: softirq rcu_read_lock &n->lock &(&n->ha_lock)->lock &____s->seqcount#8 irq_context: softirq rcu_read_lock rcu_read_lock &n->lock irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock lock#8 irq_context: softirq rcu_read_lock rcu_read_lock id_table_lock irq_context: softirq rcu_read_lock &n->lock irq_context: softirq rcu_read_lock &n->lock &____s->seqcount#8 irq_context: softirq rcu_read_lock nl_table_lock irq_context: softirq rcu_read_lock rlock-AF_NETLINK irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &dir->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 batched_entropy_u16.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 (&req->rsk_timer) irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &base->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &icsk->icsk_accept_queue.rskq_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 pool_lock#2 irq_context: softirq rcu_read_lock 
rcu_read_lock slock-AF_INET/1 &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &icsk->icsk_accept_queue.rskq_lock irq_context: 0 sk_lock-AF_INET clock-AF_INET irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &journal->j_list_lock &obj_hash[i].lock irq_context: 0 &journal->j_list_lock pool_lock#2 irq_context: 0 rcu_read_lock &base->lock irq_context: 0 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &xa->xa_lock#9 pool_lock#2 irq_context: 0 sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &base->lock irq_context: 0 sk_lock-AF_INET &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET fs_reclaim irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &____s->seqcount irq_context: 0 sk_lock-AF_INET &c->lock irq_context: 0 sk_lock-AF_INET pool_lock#2 irq_context: 0 sk_lock-AF_INET &mm->mmap_lock irq_context: 0 sk_lock-AF_INET tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &sd->defer_lock irq_context: softirq &sd->defer_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock 
slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock &obj_hash[i].lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq (&icsk->icsk_retransmit_timer) irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 irq_context: softirq (&icsk->icsk_delack_timer) irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &c->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ul->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &ul->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock 
&____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 kfence_freelist_lock irq_context: 0 &pipe->mutex/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &u->iolock &dir->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET clock-AF_INET irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock tcp_metrics_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock tcp_metrics_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock 
irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback uidhash_lock irq_context: softirq rcu_callback percpu_counters_lock irq_context: softirq rcu_callback ucounts_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_read_lock rcu_node_0 irq_context: 0 &u->iolock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rq->__lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &c->lock irq_context: 0 &pipe->wr_wait irq_context: 0 &sig->cred_guard_mutex key#5 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem &mapping->i_private_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 pool_lock#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &c->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 
rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &pipe->wr_wait &p->pi_lock irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &n->list_lock irq_context: 0 sk_lock-AF_INET &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#6 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock batched_entropy_u8.lock irq_context: 0 &vma->vm_lock->lock kfence_freelist_lock irq_context: 0 &mm->mmap_lock &meta->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock 
slock-AF_INET/1 rcu_read_lock rcu_read_lock &ul->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock key irq_context: 0 &vma->vm_lock->lock rcu_read_lock pcpu_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: softirq slock-AF_INET#2 tk_core.seq.seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 kfence_freelist_lock irq_context: 0 &pipe->mutex/1 rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 rcu_read_lock &rq->__lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &stopper->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &stop_pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &stop_pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rq->__lock rcu_read_lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &stopper->lock irq_context: softirq rcu_read_lock &stop_pi_lock irq_context: softirq rcu_read_lock &stop_pi_lock &rq->__lock irq_context: softirq rcu_read_lock &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->wr_wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &meta->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 kfence_freelist_lock irq_context: softirq &tx->clean_lock quarantine_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 
&sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &stopper->lock irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &stop_pi_lock irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &stop_pi_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 tomoyo_ss irq_context: 0 sb_writers#3 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &mapping->i_mmap_rwsem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &journal->j_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 
irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET &mm->mmap_lock fs_reclaim irq_context: 0 sk_lock-AF_INET &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &____s->seqcount irq_context: 0 sb_writers#3 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#3 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#3 tomoyo_ss &meta->lock irq_context: 0 sb_writers#3 tomoyo_ss &c->lock irq_context: 0 sb_writers#3 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_list_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu irq_context: 0 sk_lock-AF_INET remove_cache_srcu quarantine_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &c->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &n->list_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 lock#5 irq_context: softirq &(&tbl->gc_work)->timer irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &base->lock &obj_hash[i].lock irq_context: softirq (&wq_watchdog_timer) &obj_hash[i].lock irq_context: softirq (&wq_watchdog_timer) &base->lock irq_context: softirq (&wq_watchdog_timer) &base->lock &obj_hash[i].lock irq_context: softirq &c->lock 
batched_entropy_u8.lock irq_context: softirq &c->lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 batched_entropy_u16.lock crngs.lock irq_context: 0 &vma->vm_lock->lock &mm->page_table_lock irq_context: 0 kn->active#42 fs_reclaim irq_context: 0 kn->active#42 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rcu_read_lock &sighand->siglock pool_lock#2 irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock irq_context: 0 &futex_queues[i].lock irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 rcu_read_lock &sighand->siglock kfence_freelist_lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &ep->lock &ep->wq irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &lock->wait_lock irq_context: 0 &f->f_pos_lock &p->lock irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 clock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 &ep->mtx sysctl_lock irq_context: 0 &f->f_pos_lock sysctl_lock irq_context: 0 &f->f_pos_lock fs_reclaim irq_context: 0 &f->f_pos_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &zone->lock irq_context: 0 &f->f_pos_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &rq->__lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 kn->active#5 
&kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &ep->mtx kn->active#5 fs_reclaim irq_context: 0 &ep->mtx kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ep->mtx kn->active#5 pool_lock#2 irq_context: 0 &ep->mtx kn->active#5 &on->poll irq_context: 0 &f->f_pos_lock &p->lock &of->mutex irq_context: 0 &f->f_pos_lock &p->lock &of->mutex kn->active#5 param_lock irq_context: 0 &ep->mtx rcu_read_lock &on->poll irq_context: 0 &f->f_pos_lock &p->lock &c->lock irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock fs_reclaim irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 kn->active#5 &c->lock irq_context: 0 &f->f_pos_lock &p->lock &n->list_lock irq_context: 0 &f->f_pos_lock &p->lock &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &p->lock pool_lock#2 irq_context: 0 &f->f_pos_lock &p->lock module_mutex irq_context: 0 sk_lock-AF_INET once_mutex irq_context: 0 sk_lock-AF_INET once_mutex crngs.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &hashinfo->ehash_locks[i] irq_context: 0 sk_lock-AF_INET &rq->__lock irq_context: 0 sk_lock-AF_INET batched_entropy_u32.lock irq_context: 0 sk_lock-AF_INET batched_entropy_u16.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq slock-AF_INET#2 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq slock-AF_INET#2 &base->lock irq_context: softirq slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&lapb->t1timer) &lapb->lock &____s->seqcount irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)dm_bufio_cache irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) dm_bufio_clients_lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &base->lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &base->lock &obj_hash[i].lock irq_context: softirq drivers/regulator/core.c:6335 irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (regulator_init_complete_work).work irq_context: 0 (wq_completion)events (regulator_init_complete_work).work &k->list_lock irq_context: 0 (wq_completion)events (regulator_init_complete_work).work &k->k_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 batched_entropy_u8.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rcu_read_lock &sighand->siglock &c->lock irq_context: 0 rcu_read_lock &sighand->siglock &____s->seqcount irq_context: 0 &ep->mtx &pipe->wr_wait irq_context: 0 rcu_read_lock tasklist_lock irq_context: 0 &ep->mtx rcu_read_lock &pipe->wr_wait irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 sysctl_lock irq_context: 0 sb_writers#4 &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount 
irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
&ei->xattr_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock 
&obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#41/1 irq_context: 0 &type->s_umount_key#41/1 fs_reclaim irq_context: 0 &type->s_umount_key#41/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 pool_lock#2 irq_context: 0 &type->s_umount_key#41/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#41/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#41/1 shrinker_mutex irq_context: 0 &type->s_umount_key#41/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#41/1 sb_lock irq_context: 0 &type->s_umount_key#41/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem &____s->seqcount irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 &type->s_umount_key#41/1 crngs.lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#41/1 &dentry->d_lock irq_context: 0 sb_writers#8 irq_context: 0 sb_writers#8 mount_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tk_core.seq.seqcount irq_context: 0 sb_writers#8 
&type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock &wq#2 irq_context: 0 kn->active#43 fs_reclaim irq_context: 0 kn->active#43 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#43 pool_lock#2 irq_context: 0 sb_writers#8 fs_reclaim irq_context: 0 sb_writers#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &mm->mmap_lock irq_context: 0 sb_writers#8 &of->mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex css_set_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss 
tomoyo_policy_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 cgroup_mutex css_set_lock cgroup_file_kn_lock irq_context: 0 &type->s_umount_key#42/1 irq_context: 0 &type->s_umount_key#42/1 fs_reclaim irq_context: 0 &type->s_umount_key#42/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 pool_lock#2 irq_context: 0 &type->s_umount_key#42/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#42/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#42/1 shrinker_mutex irq_context: 0 &type->s_umount_key#42/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#42/1 sb_lock irq_context: 0 &type->s_umount_key#42/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 &type->s_umount_key#42/1 crngs.lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#42/1 &dentry->d_lock irq_context: 0 rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#43 irq_context: 0 &type->s_umount_key#43 &x->wait#23 irq_context: 0 &type->s_umount_key#43 shrinker_mutex irq_context: 0 &type->s_umount_key#43 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock pool_lock#2 irq_context: 0 &type->s_umount_key#43 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#43 rename_lock.seqcount irq_context: 0 &type->s_umount_key#43 &dentry->d_lock irq_context: 0 &type->s_umount_key#43 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#43 &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#43 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#43 inode_hash_lock irq_context: 0 &type->s_umount_key#43 inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 pool_lock#2 irq_context: 0 &type->s_umount_key#43 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#43 &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#42/1 &c->lock irq_context: 0 &type->s_umount_key#42/1 &n->list_lock irq_context: 0 &type->s_umount_key#42/1 &n->list_lock &c->lock irq_context: 0 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 
cgroup_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 cgroup_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 cgroup_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 cgroup_mutex &x->wait#2 irq_context: 0 cgroup_mutex &rq->__lock irq_context: 0 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback percpu_ref_switch_waitq.lock irq_context: softirq rcu_callback rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 cgroup_mutex.wait_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &c->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex.wait_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) percpu_ref_switch_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events 
(work_completion)(&cgrp->bpf.release_work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex cgroup_rstat_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) percpu_ref_switch_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &cgrp->pidlist_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) (wq_completion)cgroup_pidlist_destroy irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex &pool->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) (work_completion)(&cgrp->release_agent_work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem pool_lock#2 
irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_rstat_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) pcpu_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) krc.lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &x->wait#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &rq->__lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex fs_reclaim irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy 
(work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex.wait_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock irq_context: 0 cgroup_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cgroup_mutex &n->list_lock irq_context: 0 cgroup_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#9 irq_context: 0 sb_writers#9 mount_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 cgroup_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cgroup_mutex cpuset_mutex irq_context: 0 cgroup_mutex cpuset_mutex callback_lock irq_context: 0 cgroup_mutex &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem &c->lock irq_context: 0 &type->s_umount_key#43 &obj_hash[i].lock pool_lock irq_context: 0 cgroup_mutex &dom->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex.wait_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) &p->pi_lock irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &c->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpuset_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpuset_mutex callback_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &dom->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cgroup_destroy 
(work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_mutex batched_entropy_u8.lock irq_context: 0 cgroup_mutex kfence_freelist_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock &wq#2 irq_context: 0 kn->active#44 fs_reclaim irq_context: 0 kn->active#44 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 fs_reclaim irq_context: 0 sb_writers#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &mm->mmap_lock irq_context: 0 sb_writers#9 &of->mutex irq_context: 0 sb_writers#9 &obj_hash[i].lock irq_context: 0 kn->active#45 fs_reclaim irq_context: 0 kn->active#45 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#45 &c->lock irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &of->mutex kn->active#45 cpu_hotplug_lock irq_context: 0 sb_writers#9 &of->mutex kn->active#45 cpu_hotplug_lock cpuset_mutex irq_context: 0 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#10 irq_context: 0 sb_writers#10 &mm->mmap_lock irq_context: 0 sb_writers#10 &attr->mutex irq_context: 0 sb_writers#10 &attr->mutex &mm->mmap_lock irq_context: 0 &type->s_umount_key#44 irq_context: 0 &type->s_umount_key#44 sb_lock irq_context: 0 &type->s_umount_key#44 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rcu_read_lock 
mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key#26 irq_context: 0 sb_writers#11 irq_context: 0 sb_writers#11 fs_reclaim irq_context: 0 sb_writers#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 pool_lock#2 irq_context: 0 sb_writers#11 &mm->mmap_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rename_lock.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 fs_reclaim irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 pool_lock#2 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &dentry->d_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &dentry->d_lock &wq irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &c->lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &pcp->lock &zone->lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &____s->seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &sb->s_type->i_lock_key#26 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &s->s_inode_list_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 tk_core.seq.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &sb->s_type->i_lock_key#26 &dentry->d_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 init_binfmt_misc.entries_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rcu_read_lock &dentry->d_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &wpan_dev->association_lock irq_context: 0 rtnl_mutex dev_addr_sem irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock irq_context: 0 rtnl_mutex dev_addr_sem &sdata->sec_mtx irq_context: 0 rtnl_mutex dev_addr_sem &sdata->sec_mtx &sec->lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex dev_addr_sem pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem rlock-AF_NETLINK irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock 
&ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock irq_context: 0 rtnl_mutex dev_addr_sem &pn->hash_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem input_pool.lock irq_context: 0 rtnl_mutex _xmit_IEEE802154 irq_context: 0 rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &xa->xa_lock#16 irq_context: 0 &sb->s_type->i_mutex_key#10 genl_sk_destructing_waitq.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rdev->beacon_registrations_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rdev->mgmt_registrations_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &wdev->pmsr_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem reg_indoor_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 
sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem key#15 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem key#3 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &genl_data->genl_data_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->w) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &____s->seqcount irq_context: 0 &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &p->lock &mm->mmap_lock fs_reclaim irq_context: 0 &p->lock &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &mm->mmap_lock &____s->seqcount irq_context: 0 &p->lock &mm->mmap_lock 
mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_lock nl_table_wait.lock irq_context: 0 nl_table_wait.lock &p->pi_lock irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &base->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 &wb->list_lock irq_context: 0 &sbi->s_writepages_rwsem irq_context: 0 &sbi->s_writepages_rwsem &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem lock#4 irq_context: 0 &sbi->s_writepages_rwsem lock#4 &lruvec->lru_lock irq_context: 0 &sbi->s_writepages_rwsem lock#5 irq_context: 0 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#4 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#5 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &memcg->move_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates irq_context: 0 &sbi->s_writepages_rwsem tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem &base->lock irq_context: 0 &sbi->s_writepages_rwsem &base->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_es_lock key#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 
(wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &base->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 jbd2_handle irq_context: 0 &journal->j_wait_commit irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &ret->b_state_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle key#3 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle key#4 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &sbi->s_error_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 irq_context: 0 &sb->s_type->i_mutex_key#8 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock irq_context: 0 &sb->s_type->i_mutex_key#8 vmap_area_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 init_mm.page_table_lock irq_context: 0 &sb->s_type->i_mutex_key#8 pcpu_alloc_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 pcpu_alloc_mutex pcpu_lock irq_context: 0 
&sb->s_type->i_mutex_key#8 batched_entropy_u32.lock irq_context: 0 &sb->s_type->i_mutex_key#8 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 &sb->s_type->i_mutex_key#8 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 &x->wait#26 irq_context: 0 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &base->lock irq_context: 0 &sb->s_type->i_mutex_key#8 &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &rq->__lock irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 (&timer.timer) irq_context: 0 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &((cluster_info + ci)->lock)/1 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock &p->lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex percpu_ref_switch_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock &p->lock#2 swap_avail_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex (console_sem).lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 
&sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &sb->s_type->i_mutex_key#8 proc_poll_wait.lock irq_context: 0 swap_slots_cache_enable_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &x->wait#6 irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &rq->__lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up swap_slots_cache_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 swap_slots_cache_enable_mutex swap_lock irq_context: 0 &____s->seqcount#4 irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#5 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex fs_reclaim &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#5 mount_lock irq_context: 0 &mm->mmap_lock sb_writers#5 tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 &mm->mmap_lock sb_writers#5 &wb->list_lock irq_context: 0 &mm->mmap_lock sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &newf->file_lock &newf->resize_wait irq_context: 0 &kcov->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &kcov->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem lock#4 irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_es_lock key#2 irq_context: 0 sb_writers#6 &rq->__lock irq_context: 0 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kcov->lock kcov_remote_lock irq_context: 0 &kcov->lock kcov_remote_lock pool_lock#2 irq_context: 0 pid_caches_mutex irq_context: 0 pid_caches_mutex slab_mutex irq_context: 0 pid_caches_mutex slab_mutex fs_reclaim irq_context: 0 pid_caches_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pid_caches_mutex slab_mutex pool_lock#2 irq_context: 0 pid_caches_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 pid_caches_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pid_caches_mutex slab_mutex &root->kernfs_rwsem irq_context: 0 pid_caches_mutex slab_mutex &k->list_lock irq_context: 0 pid_caches_mutex slab_mutex &c->lock irq_context: 0 pid_caches_mutex slab_mutex lock irq_context: 0 pid_caches_mutex slab_mutex lock kernfs_idr_lock irq_context: 0 pid_caches_mutex slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#45 irq_context: 0 &type->s_umount_key#45 sb_lock irq_context: 0 &type->s_umount_key#45 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &c->lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock &obj_hash[i].lock irq_context: 0 bt_proto_lock &sk->sk_peer_lock irq_context: 0 bt_proto_lock hci_sk_list.lock irq_context: 0 misc_mtx &base->lock irq_context: 0 misc_mtx &base->lock &obj_hash[i].lock irq_context: 0 (work_completion)(&(&data->open_timeout)->work) irq_context: 0 &data->open_mutex irq_context: 0 &data->open_mutex fs_reclaim irq_context: 0 &data->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex pool_lock#2 irq_context: 0 &data->open_mutex &____s->seqcount irq_context: 0 &data->open_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex &x->wait#9 irq_context: 0 &data->open_mutex hci_index_ida.xa_lock irq_context: 0 &data->open_mutex &c->lock irq_context: 0 &data->open_mutex pcpu_alloc_mutex irq_context: 0 &data->open_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim 
irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex wq_pool_mutex irq_context: 0 &data->open_mutex wq_pool_mutex &wq->mutex irq_context: 0 &data->open_mutex pin_fs_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &data->open_mutex &k->list_lock irq_context: 0 &data->open_mutex gdp_mutex irq_context: 0 &data->open_mutex gdp_mutex &k->list_lock irq_context: 0 &data->open_mutex gdp_mutex fs_reclaim irq_context: 0 &data->open_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex gdp_mutex pool_lock#2 irq_context: 0 &data->open_mutex gdp_mutex lock irq_context: 0 &data->open_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex bus_type_sem irq_context: 0 &data->open_mutex sysfs_symlink_target_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex &dev->power.lock irq_context: 0 &data->open_mutex dpm_list_mtx irq_context: 0 &data->open_mutex uevent_sock_mutex irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock 
&ei->socket.wq.wait &ep->lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &data->open_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex subsys mutex#74 irq_context: 0 &data->open_mutex subsys mutex#74 &k->k_lock irq_context: 0 &data->open_mutex &dev->devres_lock irq_context: 0 &data->open_mutex triggers_list_lock irq_context: 0 &data->open_mutex leds_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex rfkill_global_mutex pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &k->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex bus_type_sem irq_context: 0 &data->open_mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &____s->seqcount irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex &dev->power.lock irq_context: 0 &data->open_mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 &data->open_mutex rfkill_global_mutex &rfkill->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 
&data->open_mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex rfkill_global_mutex &k->k_lock irq_context: 0 &data->open_mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 &data->open_mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 &data->open_mutex rfkill_global_mutex triggers_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex leds_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &rfkill->lock irq_context: 0 &data->open_mutex hci_dev_list_lock irq_context: 0 &data->open_mutex tk_core.seq.seqcount irq_context: 0 &data->open_mutex hci_sk_list.lock irq_context: 0 &data->open_mutex (pm_chain_head).rwsem irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &base->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock nl_table_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &data->open_mutex &list->lock#7 irq_context: 0 &data->open_mutex &data->read_wait irq_context: 0 &list->lock#7 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI sock_cookie_ida.xa_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &p->alloc_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI pool_lock#2 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI tk_core.seq.seqcount irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &obj_hash[i].lock irq_context: 0 slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 hci_dev_list_lock irq_context: 0 &data->open_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 &data->open_mutex uevent_sock_mutex &c->lock irq_context: 0 &data->open_mutex uevent_sock_mutex &n->list_lock irq_context: 0 &data->open_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex.wait_lock irq_context: 0 &data->open_mutex &p->pi_lock irq_context: 0 &data->open_mutex &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &c->lock irq_context: 0 
rtnl_mutex dev_pm_qos_sysfs_mtx irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 rtnl_mutex subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &c->lock irq_context: 0 &list->lock#6 irq_context: 0 &data->read_wait irq_context: 0 &data->open_mutex &n->list_lock irq_context: 0 &data->open_mutex &n->list_lock &c->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &nf_conntrack_locks[i] irq_context: 0 rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&br->gc_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex j1939_netdev_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &vlan_netdev_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &batadv_netdev_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_LOOPBACK#2 irq_context: 0 &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 &sch->q.lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &sem->wait_lock 
irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_query_lock irq_context: 0 rtnl_mutex &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 rtnl_mutex dev_base_lock &xa->xa_lock#4 irq_context: 0 rtnl_mutex cpu_hotplug_lock &list->lock#12 irq_context: 0 rtnl_mutex bpf_devs_lock irq_context: 0 rtnl_mutex &hwstats->hwsdev_list_lock irq_context: 0 rtnl_mutex &hwstats->hwsdev_list_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &hwstats->hwsdev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem quarantine_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-slock-AF_INET6 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &tbl->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem class irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem (&tbl->proxy_timer) irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem krc.lock irq_context: 0 rtnl_mutex deferred_probe_mutex irq_context: 0 rtnl_mutex device_links_lock irq_context: 0 rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &table->hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &table->hash[i].lock &table->hash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-clock-AF_INET6 irq_context: 0 rtnl_mutex &ul->lock irq_context: 0 rtnl_mutex &net->xdp.lock irq_context: 0 rtnl_mutex mirred_list_lock irq_context: 0 rtnl_mutex &ul->lock#2 pool_lock#2 irq_context: 0 rtnl_mutex &ul->lock#2 &dir->lock#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_lock_key#8 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex 
&root->kernfs_rwsem &p->pi_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &xa->xa_lock#9 irq_context: 0 nfc_devlist_mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fsnotify_mark_srcu irq_context: 0 nfc_devlist_mutex &p->pi_lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rnp->exp_wq[0] irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex pool_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &new->lock#2 (console_sem).lock irq_context: 0 rcu_read_lock &new->lock#2 console_lock console_srcu console_owner_lock irq_context: 0 rcu_read_lock &new->lock#2 console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 &pool->lock &x->wait#10 irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock irq_context: 0 &hdev->req_lock pool_lock#2 irq_context: 0 &hdev->req_lock &rq->__lock irq_context: 0 &hdev->req_lock &list->lock#5 irq_context: 0 &hdev->req_lock &list->lock#6 irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->req_wait_q irq_context: 0 &hdev->req_lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &base->lock irq_context: 0 &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &table->lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->socket_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock 
rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nn->netlink_tap_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->mii_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->arp_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->alb_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->ad_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->mcast_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->slave_arr_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->hello_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->topology_change_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->tcn_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_mc_router_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_other_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_other_query.delay_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_own_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_mc_router_timer) irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_other_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_other_query.delay_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_own_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &im->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem &sighand->siglock irq_context: 0 cgroup_threadgroup_rwsem &sighand->siglock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &____s->seqcount#2 irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &obj_hash[i].lock irq_context: 0 
&sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI sock_cookie_ida.xa_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI clock-AF_BLUETOOTH irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 hci_dev_list_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 &rq->__lock irq_context: 0 namespace_sem &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock krc.lock irq_context: 0 rtnl_mutex &idev->mc_report_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem namespace_sem.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem mnt_id_ida.xa_lock pool_lock#2 irq_context: 0 namespace_sem.wait_lock irq_context: 0 &type->s_umount_key#21/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#21/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock &undo_list->lock irq_context: 0 rtnl_mutex rcu_read_lock sysctl_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 &aux->poke_mutex irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 map_idr_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &data->open_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 sched_map-wait-type-override rcu_node_0 irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 cb_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 cb_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq irq_context: 0 sb_writers#4 &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem &____s->seqcount irq_context: 0 rtnl_mutex &nr_netdev_addr_lock_key irq_context: 0 rtnl_mutex listen_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 map_idr_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &meta->lock irq_context: 0 &root->kernfs_rwsem stock_lock irq_context: 0 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem key irq_context: 0 &root->kernfs_rwsem pcpu_lock irq_context: 0 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 map_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 pernet_ops_rwsem irq_context: 0 pernet_ops_rwsem stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem crngs.lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 pernet_ops_rwsem fs_reclaim irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &c->lock irq_context: 0 pernet_ops_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem sysctl_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem &dir->lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem nl_table_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nl_table_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nl_table_wait.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex irq_context: 0 pernet_ops_rwsem &____s->seqcount irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem &net->rules_mod_lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem percpu_counters_lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET/1 irq_context: 0 pernet_ops_rwsem cache_list_lock irq_context: 0 pernet_ops_rwsem tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem &k->list_lock irq_context: 0 pernet_ops_rwsem lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem uevent_sock_mutex pool_lock#2 irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock 
&ei->socket.wq.wait irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem &n->list_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_ecache_mutex irq_context: 0 pernet_ops_rwsem nf_log_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &c->lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem &base->lock irq_context: 0 pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock crngs.lock irq_context: 0 pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: softirq rcu_callback batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#23 irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 rtnl_mutex &dentry->d_lock irq_context: 0 rtnl_mutex rename_lock.seqcount irq_context: 0 rtnl_mutex rcu_read_lock &dentry->d_lock irq_context: 0 rtnl_mutex &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 rtnl_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 rtnl_mutex &dentry->d_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dentry->d_lock pool_lock#2 irq_context: 0 rtnl_mutex &s->s_inode_list_lock irq_context: 0 rtnl_mutex &xa->xa_lock#9 irq_context: 0 rtnl_mutex &fsnotify_mark_srcu irq_context: 0 rtnl_mutex &dentry->d_lock &lru->node[i].lock irq_context: 0 rtnl_mutex &pnn->pndevs.lock irq_context: 0 rtnl_mutex &pnn->routes.lock irq_context: 0 rtnl_mutex &pnn->routes.lock &rq->__lock irq_context: 0 rtnl_mutex &pnn->routes.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex irq_context: 0 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex 
&idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 dev_base_lock irq_context: 0 lweventlist_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem stock_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem key irq_context: 0 rtnl_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem &this->receive_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &tun->lock irq_context: 0 &dir->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem 
rtnl_mutex gdp_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem irq_context: 0 pernet_ops_rwsem rtnl_mutex sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dir->lock#2 &obj_hash[i].lock irq_context: 0 &dir->lock#2 pool_lock#2 irq_context: 0 netdev_unregistering_wq.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 &dir->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock 
&n->list_lock &c->lock irq_context: 0 &dir->lock &____s->seqcount irq_context: 0 &dir->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)netns irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 css_set_lock cgroup_file_kn_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem net_rwsem irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock tasklist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->nsid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rdma_nets.xa_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem devices_rwsem irq_context: 0 pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 pernet_ops_rwsem rcu_node_0 irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex 
failover_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex.wait_lock irq_context: 0 pernet_ops_rwsem &p->pi_lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 syslog_lock &rq->__lock irq_context: 0 kn->active#7 remove_cache_srcu irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 &lruvec->lru_lock irq_context: 0 &u->iolock &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &u->iolock &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &info->lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 &hdev->req_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &c->lock irq_context: 0 kn->active#7 remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount irq_context: 0 kn->active#7 remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex.wait_lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &____s->seqcount irq_context: 0 &p->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock 
rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock quarantine_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &____s->seqcount#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rq->__lock irq_context: 0 lweventlist_lock pool_lock#2 irq_context: 0 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &c->lock irq_context: 0 &p->lock &of->mutex kn->active#21 &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex.wait_lock irq_context: 0 rtnl_mutex dev_addr_sem stock_lock irq_context: 0 rtnl_mutex dev_addr_sem pcpu_lock stock_lock irq_context: 0 namespace_sem namespace_sem.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 namespace_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock irq_context: 0 &p->lock &of->mutex kn->active#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock sysctl_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem wq_pool_mutex irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem &list->lock#4 irq_context: 0 pernet_ops_rwsem &dir->lock#2 irq_context: 0 pernet_ops_rwsem ptype_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-clock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 
pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex crngs.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &dir->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 &disk->open_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 
&sb->s_type->i_mutex_key#8 &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &meta->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sta->lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex 
&vlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock quarantine_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &base->lock &obj_hash[i].lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &base->lock irq_context: 0 &fsnotify_mark_srcu pool_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex 
(inet6addr_validator_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &dir->lock irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) pool_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_lock_key#8 bit_wait_table + i irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &data->open_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex kthread_create_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait irq_context: 0 pernet_ops_rwsem 
k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait#21 irq_context: 0 &x->wait#21 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#21 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &local->services_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 pernet_ops_rwsem &call->waitq irq_context: 0 pernet_ops_rwsem &rx->call_lock irq_context: 0 pernet_ops_rwsem &rxnet->call_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem net_rwsem irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock sysctl_lock irq_context: 0 sb_writers#4 &____s->seqcount#10 irq_context: 0 sb_writers#4 &(&net->ipv4.ping_group_range.lock)->lock irq_context: 0 sb_writers#4 &(&net->ipv4.ping_group_range.lock)->lock &____s->seqcount#10 irq_context: 0 misc_mtx &dir->lock irq_context: 0 rtnl_mutex &r->consumer_lock irq_context: 0 rtnl_mutex &r->consumer_lock &r->producer_lock irq_context: 0 &data->open_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 misc_mtx remove_cache_srcu irq_context: 0 misc_mtx remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx remove_cache_srcu &c->lock irq_context: 0 misc_mtx remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx remove_cache_srcu &pcp->lock 
&zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_highpri (work_completion)(flush) irq_context: 0 (wq_completion)events_highpri (work_completion)(flush) &list->lock#12 irq_context: 0 rtnl_mutex cpu_hotplug_lock (work_completion)(flush) irq_context: 0 rtnl_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 
0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rlock-AF_PACKET irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_node_0 irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem key irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem percpu_counters_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &sem->wait_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex failover_lock irq_context: 0 rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &mm->mmap_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex 
&____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem rdma_nets.xa_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock init_task.mems_allowed_seq.seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 rcu_read_lock pool_lock#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex pcpu_lock stock_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) rcu_node_0 irq_context: 0 &fsnotify_mark_srcu 
batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#7 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&dev->mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#7 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#7 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#7 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) batched_entropy_u8.lock crngs.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 kfence_freelist_lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock &rq->__lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 ebt_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex pool_lock#2 irq_context: 0 &p->lock &of->mutex &rcu_state.expedited_wq irq_context: 0 &fsnotify_mark_srcu kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock quarantine_lock irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kn->active#7 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mount_lock irq_context: 0 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &n->lock irq_context: 0 rtnl_mutex &n->lock &(&n->ha_lock)->lock irq_context: 0 rtnl_mutex &n->lock &(&n->ha_lock)->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &tbl->lock &n->lock irq_context: 0 rtnl_mutex rcu_read_lock lock#8 irq_context: 0 rtnl_mutex rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex &n->lock irq_context: 0 rtnl_mutex &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 rtnl_mutex &tbl->lock batched_entropy_u32.lock irq_context: 0 rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex dev_addr_sem &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock nl_table_wait.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock rcu_read_lock lock#8 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &dir->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem 
&tbl->lock krc.lock irq_context: 0 rtnl_mutex _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &dir->lock#2 irq_context: 0 rtnl_mutex &ndev->lock pcpu_lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 rtnl_mutex &ndev->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock 
irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex key irq_context: 0 rtnl_mutex percpu_counters_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 rtnl_mutex &br->hash_lock irq_context: 0 rtnl_mutex &br->hash_lock &____s->seqcount irq_context: 0 rtnl_mutex &br->hash_lock &c->lock irq_context: 0 rtnl_mutex &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex nf_hook_mutex irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex nf_hook_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u32.lock crngs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex key irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex percpu_counters_lock irq_context: 0 rtnl_mutex j1939_netdev_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &c->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_node_0 
irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &n->list_lock &c->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &c->lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: 0 &x->wait#21 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex hrtimer_bases.lock irq_context: 0 rtnl_mutex hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock pool_lock#2 irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &c->lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.container_list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock pool_lock#2 irq_context: 0 rtnl_mutex key#16 irq_context: 0 rtnl_mutex &bat_priv->tt.changes_list_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 rtnl_mutex kernfs_idr_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex noop_qdisc.q.lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override pool_lock irq_context: softirq &(&bat_priv->nc.work)->timer irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock 
&pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) key#17 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) key#18 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &c->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex tk_core.seq.seqcount irq_context: 0 rtnl_mutex &wq->mutex irq_context: 0 rtnl_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex init_lock irq_context: 0 rtnl_mutex init_lock slab_mutex irq_context: 0 rtnl_mutex init_lock slab_mutex fs_reclaim irq_context: 0 rtnl_mutex init_lock slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex init_lock slab_mutex pool_lock#2 irq_context: 0 rtnl_mutex init_lock slab_mutex pcpu_alloc_mutex irq_context: 0 rtnl_mutex init_lock slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex init_lock slab_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex init_lock slab_mutex &k->list_lock irq_context: 0 rtnl_mutex init_lock slab_mutex lock irq_context: 0 rtnl_mutex init_lock slab_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex init_lock slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex init_lock slab_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex init_lock slab_mutex &c->lock irq_context: 0 rtnl_mutex init_lock slab_mutex &____s->seqcount irq_context: 0 rtnl_mutex init_lock fs_reclaim irq_context: 0 rtnl_mutex init_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex init_lock &zone->lock irq_context: 0 rtnl_mutex init_lock &____s->seqcount irq_context: 0 rtnl_mutex init_lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex init_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex init_lock &base->lock irq_context: 0 rtnl_mutex init_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex init_lock crngs.lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex 
rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex deferred_lock irq_context: 0 (wq_completion)events deferred_process_work irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock _xmit_ETHER irq_context: 0 rtnl_mutex &br->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex &pn->hash_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex quarantine_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex deferred_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock 
&rq->__lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sig->cred_guard_mutex quarantine_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &mm->mmap_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex lweventlist_lock &c->lock irq_context: 0 rtnl_mutex lweventlist_lock &____s->seqcount irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 ebt_mutex ebt_mutex.wait_lock irq_context: 0 ebt_mutex.wait_lock irq_context: 0 tomoyo_ss &base->lock irq_context: 0 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &bond->stats_lock/1 irq_context: softirq &(&slave->notify_work)->timer irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_wq[1] irq_context: 0 &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 
rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work &rq->__lock irq_context: 0 (wq_completion)events (debug_obj_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex.wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx pool_lock#2 irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 sb_writers#3 &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex 
&sig->exec_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock quarantine_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 fs_reclaim &obj_hash[i].lock irq_context: 0 fs_reclaim pool_lock#2 irq_context: 0 system_transition_mutex device_hotplug_lock irq_context: 0 sb_writers#7 tomoyo_ss pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &____s->seqcount irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xfrm.xfrm_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->stats_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pmc->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&mp->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&br->mcast_gc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_LOOPBACK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock key#23 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex crngs.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 rtnl_mutex ptype_lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &c->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rcu_read_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem quarantine_lock irq_context: softirq rcu_callback key#23 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex _xmit_NONE irq_context: 0 rtnl_mutex lock#9 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &sem->wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#11 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 tomoyo_ss tomoyo_policy_lock remove_cache_srcu irq_context: 0 tomoyo_ss tomoyo_policy_lock remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex net_rwsem &rq->__lock irq_context: softirq &(&bat_priv->mcast.work)->timer irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock key#16 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER &____s->seqcount irq_context: 0 rtnl_mutex &hsr->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock irq_context: 0 rtnl_mutex &meta->lock irq_context: 0 rtnl_mutex pin_fs_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock 
&macsec_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 proto_tab_lock &____s->seqcount#2 irq_context: 0 proto_tab_lock &pcp->lock &zone->lock irq_context: 0 proto_tab_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx (console_sem).lock irq_context: 0 &disk->open_mutex nbd_index_mutex &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &c->lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &n->list_lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 pcpu_alloc_mutex stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 kn->active#51 remove_cache_srcu irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu irq_context: 0 misc_mtx &wq->mutex irq_context: 0 cb_lock genl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &genl_data->genl_data_mutex &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 rtnl_mutex gdp_mutex lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 
rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 rtnl_mutex &k->k_lock irq_context: 0 rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &pcp->lock &zone->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 (console_sem).lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex rcu_read_lock mount_lock irq_context: 0 rtnl_mutex rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 rtnl_mutex mount_lock irq_context: 0 rtnl_mutex mount_lock mount_lock.seqcount irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock &rq->__lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5 tomoyo_ss rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex quarantine_lock irq_context: 0 rtnl_mutex sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu key irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu percpu_counters_lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sysctl_lock krc.lock &base->lock irq_context: 0 rtnl_mutex sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 key irq_context: 0 &sb->s_type->i_mutex_key#10 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 pcpu_lock stock_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key irq_context: 0 rtnl_mutex 
&idev->mc_lock &vlan_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex _xmit_IPGRE &n->list_lock irq_context: 0 rtnl_mutex _xmit_IPGRE &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) pcpu_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &c->lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 fs_reclaim rcu_node_0 irq_context: 0 fs_reclaim &rcu_state.expedited_wq irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock 
&rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle stock_lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &xa->xa_lock#18 irq_context: softirq (&app->join_timer) irq_context: softirq (&app->join_timer) &app->lock irq_context: softirq (&app->join_timer) &list->lock#10 irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock irq_context: softirq (&app->join_timer) &app->lock &obj_hash[i].lock irq_context: softirq (&app->join_timer) &app->lock &base->lock irq_context: softirq (&app->join_timer) &app->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#3/1 irq_context: 0 rtnl_mutex nf_hook_mutex &c->lock irq_context: softirq (&app->join_timer)#2 irq_context: softirq (&app->join_timer)#2 &app->lock#2 irq_context: softirq (&app->join_timer)#2 &list->lock#11 irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock irq_context: softirq (&app->join_timer)#2 &obj_hash[i].lock irq_context: softirq (&app->join_timer)#2 
&base->lock irq_context: softirq (&app->join_timer)#2 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) pool_lock#2 irq_context: softirq &(&bat_priv->orig_work)->timer irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) key#19 irq_context: 0 (wq_completion)events_power_efficient (gc_work).work irq_context: 0 (wq_completion)events_power_efficient (gc_work).work tk_core.seq.seqcount irq_context: 0 (wq_completion)events_power_efficient (gc_work).work "ratelimiter_table_lock" irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &base->lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock &tap_major->minor_lock irq_context: 0 rtnl_mutex rcu_read_lock &tap_major->minor_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &tap_major->minor_lock pool_lock#2 irq_context: 0 rtnl_mutex req_lock irq_context: 0 rtnl_mutex &x->wait#11 irq_context: 0 rtnl_mutex subsys mutex#75 irq_context: 0 rtnl_mutex subsys mutex#75 &k->k_lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work pool_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wg->socket_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &x->wait#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &rq->__lock irq_context: 0 sb_writers#7 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &table->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &genl_data->genl_data_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &pcp->lock &zone->lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex kernfs_idr_lock pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock stock_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events &rq->__lock irq_context: 0 (wq_completion)events &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock cpu_asid_lock irq_context: 0 rcu_read_lock &sma->sems[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex quarantine_lock irq_context: 0 (wq_completion)events 
(linkwatch_work).work rtnl_mutex lweventlist_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 kn->active#46 fs_reclaim irq_context: 0 kn->active#46 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock irq_context: 0 kn->active#47 fs_reclaim irq_context: 0 kn->active#47 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#47 &c->lock irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock nsim_bus_dev_ids.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock sysfs_symlink_target_lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex nf_hook_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex device_links_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fwnode_link_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &dev->devres_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_maps_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock irq_context: 0 (wq_completion)events (debug_obj_work).work &meta->lock irq_context: 0 (wq_completion)events (debug_obj_work).work kfence_freelist_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem stock_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem key irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem percpu_counters_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_lock stock_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_ETHER/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx &dev->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: softirq (&p->forward_delay_timer) irq_context: softirq (&p->forward_delay_timer) &br->lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &c->lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock pool_lock#2 irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock nl_table_lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &obj_hash[i].lock irq_context: 
softirq (&p->forward_delay_timer) &br->lock rcu_read_lock nl_table_wait.lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &n->list_lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem rcu_node_0 irq_context: 0 cb_lock genl_mutex &base->lock irq_context: 0 cb_lock genl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rq->__lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &rq->__lock cpu_asid_lock irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events fqdir_free_work &base->lock irq_context: softirq rcu_read_lock rcu_read_lock &meta->lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events fqdir_free_work &base->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock batched_entropy_u8.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) irq_context: 0 (wq_completion)events 
(work_completion)(&data->fib_event_work) &data->fib_event_queue_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key 
&c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock (&timer.timer) irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex kfence_freelist_lock irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock crngs.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &____s->seqcount#2 irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem 
rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &base->lock irq_context: 0 &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#51 remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#9 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 sb_writers#8 remove_cache_srcu irq_context: 0 sb_writers#8 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#8 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal 
jbd2_handle &journal->j_state_lock irq_context: 0 loop_validate_mutex &rq->__lock irq_context: 0 loop_validate_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#8 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#8 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#8 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sn->gssp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &cd->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem cache_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem cache_list_lock &cd->hash_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->can.stattimer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem vmap_area_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem xfrm_state_gc_work irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->xfrm.xfrm_state_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock sysctl_lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &lru->node[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&net->ipv6.addr_chk_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ip6_fl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->rules_mod_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->ipv6.ip6_fib_timer) irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &c->lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&mrt->ipmr_expire_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&ipvs->dest_trash_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->expire_nodest_conn_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->defense_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex 
pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->est_reload_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem recent_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hashlimit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem trans_gc_work irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_log_mutex irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex remove_cache_srcu irq_context: 0 &group->mark_mutex remove_cache_srcu quarantine_lock irq_context: 0 &group->mark_mutex remove_cache_srcu &c->lock irq_context: 0 &group->mark_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex deferred_probe_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex 
rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock subsys mutex#76 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &rq->__lock irq_context: 0 kn->active#47 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock 
&cfs_rq->removed.lock irq_context: 0 sb_writers#8 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex devnet_rename_sem &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_IPGRE#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 pcpu_alloc_mutex key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_node_0 irq_context: 0 ebt_mutex &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock kfence_freelist_lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq irq_context: 0 (wq_completion)events fqdir_free_work &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &group->mark_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &group->mark_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &____s->seqcount#7 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &____s->seqcount irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 rtnl_mutex &devlink_port->type_lock irq_context: 0 cb_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fn->fou_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq &(&hwstats->traffic_dw)->timer irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu 
&c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &table->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->endpoint_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->sync_mutex irq_context: 0 rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 kn->active#46 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (inetaddr_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem inet6addr_chain.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wdev->pmsr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 cb_lock genl_mutex &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->active_txq_lock[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&sta->drv_deliver_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &data->mutex irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->queue_stop_reason_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &fq->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &data->fib_event_queue_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &x->wait#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&ifibss->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &n->list_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_PACKET rcu_node_0 irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock 
rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock rlock-AF_PACKET irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex.wait_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &p->lock stock_lock irq_context: 0 tasklist_lock &sighand->siglock &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem key#25 irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_nat_proto_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nf_nat_proto_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 br_ioctl_mutex &p->pi_lock &rq->__lock irq_context: 0 &p->lock pcpu_lock stock_lock irq_context: 0 &x->wait#8 &p->pi_lock irq_context: 0 &x->wait#8 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#8 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &c->lock 
irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 proto_tab_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &data->fib_event_queue_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock irq_context: softirq &(&hdev->cmd_timer)->timer irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock 
&pool->lock &p->pi_lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &wg->socket_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex 
&n->list_lock irq_context: 0 misc_mtx &dev->mutex &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock kfence_freelist_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 remove_cache_srcu rcu_node_0 irq_context: 0 &f->f_pos_lock &rq->__lock cpu_asid_lock irq_context: 0 
&vma->vm_lock->lock &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pool_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &rq->__lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex nfc_devlist_mutex.wait_lock 
irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback kfence_freelist_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 &disk->open_mutex &nbd->config_lock kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex quarantine_lock irq_context: 0 &ctx->cancel_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex quarantine_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim 
mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_node_0 irq_context: 0 (work_completion)(&local->tx_work) &rq->__lock irq_context: 0 (work_completion)(&local->tx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 prog_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &n->list_lock irq_context: 0 &dev->mutex remove_cache_srcu irq_context: 0 &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 &dev->mutex remove_cache_srcu &c->lock irq_context: 0 &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_node_0 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock stock_lock irq_context: 0 rtnl_mutex rcu_read_lock key irq_context: 0 rtnl_mutex rcu_read_lock pcpu_lock irq_context: softirq (&app->periodic_timer) irq_context: softirq (&app->periodic_timer) &app->lock irq_context: softirq (&app->periodic_timer) &app->lock &obj_hash[i].lock irq_context: softirq (&app->periodic_timer) &app->lock &base->lock irq_context: softirq (&app->periodic_timer) &app->lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 &c->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &base->lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 
&n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->page_table_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &rq->__lock irq_context: 0 &dev->mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex subsys mutex#39 &rq->__lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&wdev->disconnect_wk) irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&wdev->pmsr_free_wk) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 lock prog_idr_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rtnl_mutex _xmit_SIT irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem quarantine_lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock prog_idr_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 lock prog_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 lock prog_idr_lock &____s->seqcount irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events pcpu_balance_work &p->pi_lock irq_context: 0 (wq_completion)events pcpu_balance_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#47 remove_cache_srcu irq_context: 0 kn->active#47 remove_cache_srcu quarantine_lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nfc_devlist_mutex subsys mutex#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &c->lock irq_context: 0 &mm->mmap_lock key irq_context: 0 &mm->mmap_lock pcpu_lock irq_context: 0 &mm->mmap_lock percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 &ids->rwsem rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)events pcpu_balance_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock &c->lock irq_context: 0 remove_cache_srcu stock_lock irq_context: 0 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock key#23 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: softirq rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &meta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) kfence_freelist_lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock cpu_asid_lock irq_context: 0 &iopt->domains_rwsem &iopt->iova_rwsem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 sb_writers#7 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock cpu_asid_lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock cpu_asid_lock irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &iopt->domains_rwsem &iopt->iova_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu irq_context: 0 system_transition_mutex (pm_chain_head).rwsem thermal_list_lock &rq->__lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem thermal_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle key irq_context: 0 sb_writers#3 sb_internal jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle percpu_counters_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &c->lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex pool_lock#2 irq_context: 0 sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &n->list_lock irq_context: 0 misc_mtx &dev->mutex &c->lock irq_context: 0 nfc_devlist_mutex &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &n->list_lock irq_context: 0 &dev->mutex &n->list_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 misc_mtx &dev->mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &wq->mutex &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &wq->mutex &x->wait#10 irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pcpu_lock stock_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &meta->lock irq_context: 0 (wq_completion)events free_ipc_work quarantine_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock irq_context: 0 (wq_completion)events 
(linkwatch_work).work rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &br->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &base->lock irq_context: 0 rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem 
fib_info_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &hdev->lock &n->list_lock &c->lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock (worker)->lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &n->list_lock irq_context: 0 pcpu_alloc_mutex percpu_counters_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex subsys mutex#20 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock rcu_node_0 irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &n->list_lock &c->lock irq_context: softirq (&brmctx->ip6_own_query.timer) irq_context: softirq (&brmctx->ip6_own_query.timer) &br->multicast_lock irq_context: softirq (&brmctx->ip4_own_query.timer) irq_context: softirq (&brmctx->ip4_own_query.timer) &br->multicast_lock irq_context: softirq (&in_dev->mr_ifc_timer) irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &dir->lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &ul->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount#7 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &dir->lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &tbl->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount#8 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) batched_entropy_u32.lock irq_context: softirq (&in_dev->mr_ifc_timer) &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) &base->lock irq_context: softirq (&in_dev->mr_ifc_timer) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
(work_completion)(&(&cnet->ecache.dwork)->work) irq_context: 0 rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &meta->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&br->gc_work)->timer irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) &base->lock irq_context: 0 (wq_completion)events_long 
(work_completion)(&(&br->gc_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex _xmit_TUNNEL irq_context: 0 rtnl_mutex _xmit_IPGRE irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &n->list_lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex &rq->__lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem pcpu_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex dpm_list_mtx &rq->__lock irq_context: 0 &dev->mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 cb_lock genl_mutex genl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex.wait_lock irq_context: 0 cb_lock &p->pi_lock irq_context: 0 cb_lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock pool_lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &c->lock irq_context: 0 sb_writers &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF stock_lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &u->iolock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 batched_entropy_u8.lock irq_context: 0 sb_writers#7 kn->active#4 kfence_freelist_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &pcp->lock &zone->lock irq_context: 0 misc_mtx pool_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[1] irq_context: 0 &sb->s_type->i_mutex_key#10 &net->ipv4.ra_mutex irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &tb->tb6_lock kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem stock_lock irq_context: 0 rtnl_mutex devnet_rename_sem key irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_owner_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rcu_state.expedited_wq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx 
&rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: softirq &(&conn->disc_work)->timer irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock pool_lock#2 irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->list_lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->list_lock &c->lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock irq_context: 0 rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_wait.lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock rcu_read_lock lock#8 irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &dir->lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock irq_context: 0 rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim stock_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock 
&____s->seqcount#2 irq_context: 0 (wq_completion)mm_percpu_wq &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)mm_percpu_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &pcp->lock &zone->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 rtnl_mutex _xmit_IPGRE &c->lock irq_context: 0 rtnl_mutex _xmit_IPGRE &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_TUNNEL6 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: hardirq &x->wait#14 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 &dev->mutex leds_list_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &dev->mutex leds_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &ndev->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &ndev->lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock 
&pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bond->stats_lock/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock 
rcu_read_lock_bh rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex (inet6addr_validator_chain).rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events 
(linkwatch_work).work rtnl_mutex &br->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock deferred_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock (console_sem).lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&pmctx->ip6_own_query.timer) irq_context: softirq (&pmctx->ip6_own_query.timer) &br->multicast_lock irq_context: softirq (&pmctx->ip4_own_query.timer) irq_context: softirq (&pmctx->ip4_own_query.timer) &br->multicast_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events deferred_process_work &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock &br->hash_lock irq_context: softirq rcu_read_lock &br->hash_lock pool_lock#2 irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock &br->hash_lock &____s->seqcount irq_context: softirq rcu_read_lock &br->hash_lock nl_table_lock irq_context: softirq rcu_read_lock &br->hash_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->hash_lock nl_table_wait.lock irq_context: softirq rcu_read_lock &br->multicast_lock irq_context: softirq rcu_read_lock &br->multicast_lock pool_lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: softirq rcu_read_lock &br->multicast_lock &dir->lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock deferred_lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->multicast_lock nl_table_lock irq_context: softirq rcu_read_lock &br->multicast_lock nl_table_wait.lock irq_context: softirq rcu_read_lock &br->multicast_lock &base->lock irq_context: softirq rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->multicast_lock &c->lock irq_context: softirq rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock &br->multicast_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock 
fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem quarantine_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock &br->hash_lock &c->lock irq_context: softirq rcu_read_lock &br->hash_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock &br->hash_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock deferred_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock 
rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock nl_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock nl_table_wait.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nfc_devlist_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx hrtimer_bases.lock irq_context: 0 misc_mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&hsr->announce_timer) irq_context: softirq (&hsr->announce_timer) rcu_read_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &base->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 
&vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock key irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &nn->netlink_tap_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &n->list_lock &c->lock irq_context: softirq rcu_read_lock &br->multicast_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex j1939_netdev_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex 
rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start stock_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 
rtnl_mutex dev_addr_sem rcu_read_lock key#16 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &____s->seqcount irq_context: 0 sb_writers#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex nbd_index_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &____s->seqcount irq_context: 0 rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex quarantine_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock 
rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER &pcp->lock &zone->lock irq_context: 0 rtnl_mutex _xmit_ETHER &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER (console_sem).lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex _xmit_ETHER (console_sem).lock irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock irq_context: 0 
rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock pool_lock#2 irq_context: 0 fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ndev->lock &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq rcu_read_lock &list->lock#13 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &list->lock#13 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) 
&idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 rtnl_mutex key#20 irq_context: 0 rtnl_mutex &bat_priv->tt.commit_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#8 irq_context: 0 rtnl_mutex &dir->lock irq_context: 0 rtnl_mutex k-slock-AF_INET/1 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET irq_context: 0 rtnl_mutex k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex k-slock-AF_INET#2 irq_context: 0 rtnl_mutex rcu_read_lock (console_sem).lock irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &n->list_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 rtnl_mutex k-slock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &wg->device_update_lock &sb->s_type->i_lock_key#8 irq_context: 0 rtnl_mutex &wg->device_update_lock &dir->lock irq_context: 0 rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET/1 irq_context: 0 rtnl_mutex 
&wg->device_update_lock k-sk_lock-AF_INET irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET#2 irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock jump_label_mutex irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock &wg->socket_update_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &list->lock#14 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#14 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock 
irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &mapping->i_private_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 batched_entropy_u32.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock 
&____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 rtnl_mutex &wg->device_update_lock &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 quarantine_lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem &rq->__lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal quarantine_lock irq_context: 0 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &base->lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex nbd_index_mutex.wait_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &lock->wait_lock irq_context: 0 uuid_mutex irq_context: 0 cb_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 kn->active#49 &____s->seqcount#2 irq_context: 0 (wq_completion)vsock-loopback 
(work_completion)(&vsock->pkt_work) stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock &r->producer_lock#2 irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &base->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 map_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 br_ioctl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 file_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock_bh quarantine_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem stock_lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 key irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 key#24 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 nf_sockopt_mutex nf_sockopt_mutex.wait_lock irq_context: 0 nf_sockopt_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx 
rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock stock_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pcpu_lock stock_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#4 &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#4 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 percpu_counters_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &____s->seqcount#2 irq_context: 0 &mm->mmap_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &mapping->i_private_lock irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem quarantine_lock irq_context: softirq (&timer) irq_context: softirq (&timer) &obj_hash[i].lock irq_context: softirq (&timer) &base->lock irq_context: softirq (&timer) &base->lock &obj_hash[i].lock irq_context: softirq (&timer) rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&timer) rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&timer) rcu_read_lock &n->list_lock irq_context: softirq (&timer) rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&timer) &txlock irq_context: softirq (&timer) &txlock &list->lock#3 irq_context: softirq (&timer) &txwq irq_context: softirq (&timer) &txwq &p->pi_lock irq_context: softirq (&timer) &txwq &p->pi_lock &rq->__lock irq_context: softirq (&timer) &txwq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh &list->lock#12 irq_context: 0 rcu_read_lock_bh &pcp->lock &zone->lock irq_context: softirq &keypair->receiving_counter.lock irq_context: softirq &peer->keypairs.keypair_update_lock irq_context: softirq &list->lock#14 irq_context: 0 rcu_read_lock_bh &____s->seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock pool_lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM (console_sem).lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner &port_lock_key irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner console_owner_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM pool_lock#2 irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key nr_node_list_lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key pool_lock#2 irq_context: 0 rcu_read_lock_bh _xmit_X25#2 irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh 
_xmit_X25#2 &lapbeth->up_lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock &ndev->lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu key irq_context: 0 remove_cache_srcu pcpu_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &pcp->lock &zone->lock irq_context: 0 remove_cache_srcu percpu_counters_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_node_0 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx system_transition_mutex &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 pool_lock#2 irq_context: softirq rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq 
rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu key irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pcpu_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 percpu_counters_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pcpu_lock stock_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &obj_hash[i].lock irq_context: 0 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &meta->lock irq_context: 0 rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex quarantine_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &root->deactivate_waitq irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu pool_lock#2 irq_context: softirq rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock_bh &base->lock irq_context: 0 misc_mtx rfkill_global_mutex irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx rfkill_global_mutex pool_lock#2 irq_context: 0 misc_mtx rfkill_global_mutex &rfkill->lock irq_context: 0 misc_mtx rfkill_global_mutex &data->mtx irq_context: 0 cb_lock genl_mutex rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 cb_lock rcu_read_lock pool_lock#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 cb_lock genl_mutex hwsim_radio_lock irq_context: 0 cb_lock genl_mutex &x->wait#9 irq_context: 0 cb_lock genl_mutex batched_entropy_u32.lock irq_context: 0 cb_lock genl_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex gdp_mutex irq_context: 0 cb_lock genl_mutex gdp_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem irq_context: 0 tomoyo_ss remove_cache_srcu key irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 tomoyo_ss remove_cache_srcu pcpu_lock irq_context: 0 cb_lock genl_mutex bus_type_sem irq_context: 0 tomoyo_ss remove_cache_srcu percpu_counters_lock irq_context: 0 cb_lock genl_mutex sysfs_symlink_target_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex dpm_list_mtx irq_context: 0 cb_lock genl_mutex uevent_sock_mutex irq_context: 0 cb_lock genl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex 
uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex subsys mutex#53 irq_context: 0 cb_lock genl_mutex subsys mutex#53 &k->k_lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex device_links_lock irq_context: 0 cb_lock genl_mutex &k->k_lock irq_context: 0 cb_lock genl_mutex deferred_probe_mutex irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex wq_pool_mutex irq_context: 0 cb_lock genl_mutex wq_pool_mutex &wq->mutex irq_context: 0 cb_lock genl_mutex crngs.lock irq_context: 0 cb_lock genl_mutex triggers_list_lock irq_context: 0 cb_lock genl_mutex leds_list_lock irq_context: 0 cb_lock genl_mutex &zone->lock irq_context: 0 cb_lock genl_mutex &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock rate_ctrl_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex (console_sem).lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pool_lock#2 
irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_power_efficient &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex kobj_ns_type_lock irq_context: 0 (wq_completion)events_power_efficient &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx bus_type_sem irq_context: 0 &sb->s_type->i_mutex_key#9 &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx 
subsys mutex#54 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock irq_context: softirq rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rfkill_global_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem 
&root->kernfs_iattr_rwsem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex bus_type_sem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rfkill->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &k->k_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex leds_list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex.wait_lock irq_context: 0 cb_lock genl_mutex pin_fs_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx stack_depot_init_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#4 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &tn->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx input_pool.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx failover_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_inum_ida.xa_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx 
&idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pnettable->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx smc_ib_devices.mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &ndev->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 cb_lock genl_mutex (inetaddr_chain).rwsem irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex inet6addr_chain.lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 cb_lock genl_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rdev->mgmt_registrations_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (&dwork->timer) irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex 
&rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &s->s_inode_list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#9 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &dentry->d_lock/1 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&link->color_collision_detect_work)->work) irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex.wait_lock irq_context: 0 cb_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount 
irq_context: 0 &p->lock remove_cache_srcu &____s->seqcount irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events &rq->__lock irq_context: 0 (wq_completion)bat_events &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock &rdev->wiphy.mtx rtnl_mutex.wait_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 cb_lock genl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &list->lock#15 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (console_sem).lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock genl_mutex rtnl_mutex console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock 
console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &data->mutex irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)cfg80211 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)cfg80211 
(work_completion)(&rdev->event_work) &rdev->wiphy.mtx &list->lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events wireless_nlevent_work irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem pool_lock#2 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &data->open_mutex remove_cache_srcu irq_context: 0 &data->open_mutex remove_cache_srcu quarantine_lock irq_context: 0 &data->open_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock 
rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock rtnl_mutex &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock hwsim_radio_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: softirq &list->lock#16 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &____s->seqcount irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &list->lock#15 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_owner_lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) 
rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &data->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#46/1 irq_context: 0 &type->s_umount_key#46/1 fs_reclaim irq_context: 0 &type->s_umount_key#46/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#46/1 shrinker_mutex irq_context: 0 &type->s_umount_key#46/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#46/1 sb_lock irq_context: 0 &type->s_umount_key#46/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#46/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#46/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#46/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_lock_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex irq_context: 0 
&type->s_umount_key#46/1 binderfs_minors_mutex binderfs_minors.xa_lock irq_context: 0 &type->s_umount_key#46/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rename_lock.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 fs_reclaim irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &dentry->d_lock &wq irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &sb->s_type->i_lock_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock iunique_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#8 
&type->i_mutex_dir_key#6/1 cgroup_mutex lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex css_set_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_file_kn_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock cgroup_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex task_group_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &rq->__lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->i_mutex_dir_key#6 irq_context: 0 &type->i_mutex_dir_key#6 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &c->lock irq_context: 0 &type->i_mutex_dir_key#6 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#6 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#6 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#6 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#6 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock &wq irq_context: 0 kn->active#48 fs_reclaim irq_context: 0 kn->active#48 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#49 fs_reclaim irq_context: 0 kn->active#49 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#49 
&c->lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &x->wait#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock (console_sem).lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner &port_lock_key irq_context: softirq rcu_callback rlock-AF_NETLINK irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex kfence_freelist_lock irq_context: softirq rcu_read_lock &meta->lock irq_context: softirq rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock hwsim_radio_lock pool_lock#2 irq_context: softirq rcu_read_lock hwsim_radio_lock &c->lock irq_context: softirq rcu_read_lock hwsim_radio_lock &list->lock#16 irq_context: softirq rcu_read_lock lock#6 irq_context: softirq rcu_read_lock lock#6 kcov_remote_lock irq_context: softirq rcu_read_lock &local->rx_path_lock irq_context: softirq rcu_read_lock &local->rx_path_lock &list->lock#15 irq_context: softirq rcu_read_lock &local->rx_path_lock &rdev->wiphy_work_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq 
rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 kcov_remote_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock irq_context: 
0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &s->s_inode_list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &xa->xa_lock#9 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &fsnotify_mark_srcu irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem inode_hash_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex (wq_completion)cpuset_migrate_mm irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex &pool->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex &x->wait#10 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 
&root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex css_set_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock cgroup_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cgroup_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex jump_label_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex jump_label_mutex patch_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex callback_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount#2 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount irq_context: 0 &____s->seqcount#2 irq_context: 0 rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex percpu_counters_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 
sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex pool_lock#2 irq_context: 0 rcu_read_lock &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &____s->seqcount#2 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 irq_context: 0 &type->i_mutex_dir_key#7 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 &c->lock irq_context: 0 &type->i_mutex_dir_key#7 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#7 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 
&root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock &wq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &____s->seqcount#2 irq_context: 0 kn->active#50 fs_reclaim irq_context: 0 kn->active#50 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->alloc_lock irq_context: 0 sb_writers#9 &of->mutex 
cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->alloc_lock &____s->seqcount#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex cpuset_attach_wq.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock krc.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex (wq_completion)cpuset_migrate_mm irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &pool->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &x->wait#10 irq_context: 0 stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu pcpu_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &c->lock irq_context: 0 kn->active#51 fs_reclaim irq_context: 0 kn->active#51 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#51 stock_lock irq_context: 0 kn->active#51 pool_lock#2 irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#52 fs_reclaim irq_context: 0 kn->active#52 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#52 stock_lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &c->lock irq_context: 0 sb_writers#9 &of->mutex kn->active#52 memcg_max_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 pool_lock#2 irq_context: 0 sb_writers#9 
&type->i_mutex_dir_key#7/1 &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex devcgroup_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock freezer_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_callback cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: softirq rcu_callback cgroup_threadgroup_rwsem.rss.gp_wait.lock &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &____s->seqcount#2 
irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 stock_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &c->lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem stock_lock irq_context: 0 kn->active#50 stock_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex freezer_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->alloc_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->alloc_lock &newf->file_lock irq_context: 0 &xa->xa_lock#5 pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &____s->seqcount irq_context: 0 nf_hook_mutex irq_context: 0 nf_hook_mutex fs_reclaim irq_context: 0 nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nf_hook_mutex stock_lock irq_context: 0 nf_hook_mutex pool_lock#2 irq_context: 0 ebt_mutex &mm->mmap_lock irq_context: 0 ebt_mutex &c->lock irq_context: 0 ebt_mutex &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 ebt_mutex &mm->mmap_lock fs_reclaim irq_context: 0 ebt_mutex &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ebt_mutex &mm->mmap_lock &____s->seqcount irq_context: 0 ebt_mutex &mm->mmap_lock stock_lock irq_context: 0 ebt_mutex &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 
ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ids->rwsem fs_reclaim &rq->__lock irq_context: 0 &ids->rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 &xt[i].mutex &mm->mmap_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex free_vmap_area_lock &____s->seqcount irq_context: 0 &xt[i].mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &xt[i].mutex vmap_area_lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &c->lock irq_context: 0 &xt[i].mutex &per_cpu(xt_recseq, i) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex rcu_read_lock pool_lock#2 irq_context: 0 &xt[i].mutex &obj_hash[i].lock irq_context: 0 misc_mtx rfkill_global_mutex &rq->__lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock irq_context: 0 misc_mtx rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&tun->flow_gc_timer) irq_context: softirq (&tun->flow_gc_timer) &tun->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock stock_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock irq_context: 0 nf_nat_proto_mutex irq_context: 0 nf_nat_proto_mutex fs_reclaim irq_context: 0 nf_nat_proto_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nf_nat_proto_mutex pool_lock#2 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex irq_context: 0 nf_nat_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 nf_nat_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock key#16 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex stock_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock irq_context: 0 nf_nat_proto_mutex &obj_hash[i].lock irq_context: 0 nf_nat_proto_mutex 
stock_lock irq_context: 0 &xt[i].mutex &rq->__lock irq_context: 0 &xt[i].mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex &c->lock irq_context: 0 nf_hook_mutex &____s->seqcount#2 irq_context: 0 nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &c->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &n->list_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &____s->seqcount#2 irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override &c->lock irq_context: 0 &pipe->mutex/1 stock_lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 irq_context: 0 &vma->vm_lock->lock &sb->s_type->i_lock_key irq_context: 0 &vma->vm_lock->lock &info->lock irq_context: 0 &vma->vm_lock->lock lock#4 irq_context: 0 &vma->vm_lock->lock tk_core.seq.seqcount irq_context: 0 &vma->vm_lock->lock mount_lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &ndev->lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&xa->xa_lock#5 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle stock_lock irq_context: 0 loop_validate_mutex irq_context: 0 loop_validate_mutex &lo->lo_mutex irq_context: 0 &fsnotify_mark_srcu fs_reclaim irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &fsnotify_mark_srcu &group->notification_lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->deactivate_waitq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &xt[i].mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &xt[i].mutex init_mm.page_table_lock irq_context: 0 &xt[i].mutex &n->list_lock irq_context: 0 &xt[i].mutex &n->list_lock &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem stock_lock irq_context: 0 &type->i_mutex_dir_key#4 &____s->seqcount#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &____s->seqcount#2 irq_context: softirq rcu_callback stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock irq_context: 
0 &group->notification_waitq &ep->lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &xa->xa_lock#5 irq_context: 0 &sb->s_type->i_mutex_key#9 &xa->xa_lock#5 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 stock_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 stock_lock irq_context: 0 &r->consumer_lock irq_context: 0 &mm->mmap_lock stock_lock irq_context: 0 &mm->mmap_lock stock_lock irq_context: 0 &ids->rwsem irq_context: 0 &ids->rwsem fs_reclaim irq_context: 0 &ids->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ids->rwsem stock_lock irq_context: 0 &ids->rwsem pool_lock#2 irq_context: 0 &ids->rwsem lock irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &c->lock irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 pool_lock#2 irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &new->lock#2 irq_context: 0 rcu_read_lock &new->lock#2 &undo_list->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 &undo_list->lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 &obj_hash[i].lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 pool_lock#2 irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 krc.lock irq_context: 0 &ids->rwsem &obj_hash[i].lock irq_context: 0 &ids->rwsem &c->lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 stock_lock irq_context: 0 &ids->rwsem irq_context: 0 &vma->vm_lock->lock lock#4 &lruvec->lru_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock irq_context: 0 pcpu_lock stock_lock irq_context: 0 tasklist_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu irq_context: 0 sb_writers#3 &s->s_inode_list_lock irq_context: 0 sb_writers#3 sb_internal irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal pool_lock#2 irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 sb_internal &obj_hash[i].lock irq_context: 0 sb_writers#3 inode_hash_lock irq_context: 0 sb_writers#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &fsnotify_mark_srcu irq_context: 0 sb_writers#3 sb_internal &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &wb->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal pool_lock#2 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &____s->seqcount irq_context: 0 &fsnotify_mark_srcu &rq->__lock irq_context: 0 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &____s->seqcount#2 irq_context: 0 &p->lock &of->mutex kn->active#4 &____s->seqcount#2 irq_context: 0 sb_writers#7 &xattrs->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET quarantine_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &xt[i].mutex &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 kn->active#4 fs_reclaim &rq->__lock irq_context: 0 sb_writers &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work quarantine_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &c->lock irq_context: 0 misc_mtx 
&dev->mutex rfkill_global_mutex &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override stock_lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &____s->seqcount#2 irq_context: 0 &hdev->req_lock &hdev->lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock quarantine_lock irq_context: 0 &data->open_mutex triggers_list_lock &rq->__lock irq_context: 0 &data->open_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_state.barrier_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex wq_pool_mutex &rq->__lock irq_context: 0 &data->open_mutex wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex dpm_list_mtx &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_node_0 irq_context: 0 &dir->lock#2 quarantine_lock irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 &sbi->s_writepages_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 kn->active#49 &n->list_lock irq_context: 0 kn->active#49 &n->list_lock &c->lock irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 nf_hook_mutex &n->list_lock irq_context: 0 nf_hook_mutex &n->list_lock &c->lock irq_context: 0 &group->mark_mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 fs_reclaim 
&rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rq->__lock cpu_asid_lock irq_context: 0 &data->open_mutex remove_cache_srcu &n->list_lock irq_context: 0 &data->open_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &data->open_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &data->open_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &data->open_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex remove_cache_srcu &rq->__lock irq_context: 0 &data->open_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_send_keepalive) kfence_freelist_lock irq_context: 0 &nbd->config_lock &meta->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex &mm->mmap_lock &sem->wait_lock irq_context: 0 rtnl_mutex &ul->lock#2 &n->list_lock irq_context: 0 &kcov->lock kcov_remote_lock &n->list_lock irq_context: 0 &kcov->lock kcov_remote_lock &n->list_lock &c->lock irq_context: 0 &data->open_mutex &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &rq->__lock irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock hwsim_radio_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) stock_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pcpu_lock stock_lock irq_context: 0 misc_mtx system_transition_mutex irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem tk_core.seq.seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem input_pool.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem cpu_add_remove_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem thermal_list_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem 
(work_completion)(&(&fw_cache.work)->work) irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &dev->devres_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem async_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 rtnl_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx remove_cache_srcu rcu_node_0 irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex kfence_freelist_lock irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &meta->lock irq_context: 0 (wq_completion)events_unbound &rq->__lock irq_context: 0 (wq_completion)events_unbound &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal 
jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)events (shepherd).work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (shepherd).work fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 misc_mtx misc_mtx.wait_lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu pool_lock irq_context: 0 &root->deactivate_waitq &p->pi_lock &rq->__lock irq_context: 0 &root->deactivate_waitq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 nfc_devlist_mutex dpm_list_mtx.wait_lock irq_context: 0 
sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_node_0 irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &hashinfo->lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_internal &c->lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 &mm->mmap_lock &sem->wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx stock_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx key irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx percpu_counters_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx pcpu_lock stock_lock irq_context: 0 nfc_devlist_mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &c->lock irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock cpu_asid_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock stock_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock key irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &dev->mutex &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem key irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 system_transition_mutex device_hotplug_lock (console_sem).lock irq_context: 0 system_transition_mutex device_hotplug_lock wq_pool_mutex irq_context: 0 system_transition_mutex device_hotplug_lock tasklist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &table->lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &base->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex.wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &x->wait#24 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &table->lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex _xmit_ETHER &____s->seqcount#2 irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 tasklist_lock stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 rtnl_mutex lweventlist_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex lweventlist_lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rnp->exp_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rnp->exp_wq[3] irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &c->lock irq_context: hardirq|softirq hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 cb_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock rcu_read_lock &____s->seqcount irq_context: 0 misc_mtx.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_ROSE irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE slock-AF_ROSE irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_ROSE irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE rose_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE rlock-AF_ROSE irq_context: 0 system_transition_mutex (pm_chain_head).rwsem irq_context: 0 system_transition_mutex (pm_chain_head).rwsem tk_core.seq.seqcount irq_context: 0 system_transition_mutex (pm_chain_head).rwsem input_pool.lock irq_context: 0 system_transition_mutex 
(pm_chain_head).rwsem cpu_add_remove_lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem thermal_list_lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem fw_lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem &obj_hash[i].lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem &base->lock irq_context: 0 system_transition_mutex (pm_chain_head).rwsem &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &____s->seqcount#2 irq_context: 0 cb_lock &____s->seqcount#2 irq_context: 0 cb_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kn->active#4 &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock key#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount irq_context: 0 sb_writers#7 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#7 tomoyo_ss &____s->seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock 
?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&hsr->prune_timer) irq_context: softirq (&hsr->prune_timer) &hsr->list_lock irq_context: softirq (&hsr->prune_timer) &obj_hash[i].lock irq_context: softirq (&hsr->prune_timer) &base->lock irq_context: softirq (&hsr->prune_timer) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock irq_context: softirq rcu_read_lock &table->lock#3 irq_context: softirq rcu_read_lock rcu_read_lock &table->lock#3 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &____s->seqcount#2 irq_context: 0 &ids->rwsem rcu_read_lock rcu_node_0 irq_context: 0 &ids->rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock 
irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pa->pa_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex bit_wait_table + i irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &__ctx->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#12 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#11 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#14 irq_context: hardirq &ret->b_uptodate_lock bit_wait_table + i irq_context: hardirq &ret->b_uptodate_lock bit_wait_table + i &p->pi_lock irq_context: hardirq &ret->b_uptodate_lock bit_wait_table + i &p->pi_lock &rq->__lock irq_context: hardirq &ret->b_uptodate_lock bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pool_lock irq_context: 0 sk_lock-AF_INET &f->f_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 sk_lock-AF_INET &f->f_lock fasync_lock irq_context: 0 sk_lock-AF_INET &f->f_lock fasync_lock &new->fa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) 
rcu_read_lock &im->lock &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->filter_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 nfc_devlist_mutex dpm_list_mtx &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu pool_lock#2 irq_context: 0 nfc_devlist_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 ebt_mutex &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock irq_context: 0 sk_lock-AF_INET &f->f_lock fasync_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &f->f_lock fasync_lock pool_lock#2 irq_context: 0 &ctx->uring_lock 
&____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock irq_context: 0 &ctx->uring_lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 uevent_sock_mutex rcu_node_0 irq_context: 0 uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pool_lock irq_context: 0 &mm->mmap_lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 devnet_rename_sem irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock quarantine_lock irq_context: 0 misc_mtx rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &sem->wait_lock irq_context: 0 misc_mtx rcu_read_lock &obj_hash[i].lock irq_context: 0 misc_mtx rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock console_owner_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem net_rwsem net_rwsem.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem net_rwsem &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock remove_cache_srcu irq_context: 0 &data->open_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &ei->socket.wq.wait irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 wq_pool_attach_mutex &rq->__lock irq_context: 0 wq_pool_attach_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (debug_obj_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &nft_net->commit_mutex &rq->__lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem key irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 
&sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock key#11 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &retval->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq_wait->wait irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &__ctx->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override 
&pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: hardirq &rq_wait->wait irq_context: hardirq &rq_wait->wait &p->pi_lock irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock key irq_context: 0 &xt[i].mutex &mm->mmap_lock pcpu_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock percpu_counters_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &c->lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &c->lock kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem key irq_context: 0 rtnl_mutex dev_addr_sem pcpu_lock irq_context: 0 rtnl_mutex dev_addr_sem percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 rtnl_mutex &fsnotify_mark_srcu &rq->__lock irq_context: 0 rtnl_mutex &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex stock_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock 
&base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF clock-AF_CAIF irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_CAIF irq_context: 0 &data->open_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock &wq->mutex rcu_node_0 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock &br->hash_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock &br->hash_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#10 &____s->seqcount#2 irq_context: 0 sb_writers#10 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 rtnl_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex sched_map-wait-type-override &pool->lock &p->pi_lock 
irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#312 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex (console_sem).lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &xt[i].mutex &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq &(&conn->info_timer)->timer irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&conn->info_timer)->work) &conn->chan_lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &mm->mmap_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &this->info_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pnettable->lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem &pnetids_ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 rlock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 &list->lock#19 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &sctp_ep_hashtable[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 pool_lock#2 irq_context: 0 (wq_completion)events fqdir_free_work &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#312 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF key irq_context: 0 stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 kn->active#51 remove_cache_srcu &c->lock irq_context: 0 kn->active#51 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#51 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#51 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#51 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: 
&qdisc_tx_busylock &sch->q.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &x->wait#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 kn->active#4 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock &wq->mutex &rq->__lock irq_context: 0 nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: softirq rcu_read_lock 
&br->multicast_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex key irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &nbd->config_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &wq->mutex &rq->__lock irq_context: 0 &hdev->req_lock &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: softirq (&app->join_timer)#2 fill_pool_map-wait-type-override &c->lock irq_context: softirq (&app->join_timer)#2 fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&app->join_timer)#2 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&app->join_timer)#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&app->join_timer)#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 system_transition_mutex device_hotplug_lock &rq->__lock cpu_asid_lock irq_context: 0 &ids->rwsem batched_entropy_u8.lock crngs.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &n->list_lock &c->lock irq_context: 0 cb_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex stock_lock 
irq_context: 0 cb_lock genl_mutex key irq_context: 0 cb_lock genl_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex percpu_counters_lock irq_context: 0 cb_lock genl_mutex pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex &cfs_rq->removed.lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &____s->seqcount#2 irq_context: 0 sb_writers#7 kn->active#4 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 kn->active#4 &____s->seqcount irq_context: 0 sb_writers#7 kn->active#4 &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 &n->list_lock &c->lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&w->w) quarantine_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 nf_sockopt_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &rq->__lock irq_context: 0 cb_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_owner_lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pcpu_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 quarantine_lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock pool_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex rcu_node_0 irq_context: 0 rtnl_mutex &idev->mc_lock &lock->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rcu_state.expedited_wq irq_context: 0 &data->open_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex net_rwsem.wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 cb_lock rcu_read_lock &n->list_lock irq_context: 0 cb_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_owner_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_owner irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_CAIF irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex &rq->__lock irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex percpu_counters_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 
&sbi->s_writepages_rwsem &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &retval->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq_wait->wait irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &__ctx->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback 
(work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex &wg->device_update_lock kfence_freelist_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem quarantine_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 kn->active#48 &n->list_lock irq_context: 0 kn->active#48 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &fq->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->active_txq_lock[i] irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (work_completion)(&sta->drv_deliver_wk) irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &meta->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem batched_entropy_u8.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem kfence_freelist_lock irq_context: 0 sb_writers#9 &of->mutex kn->active#50 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#9 &of->mutex kn->active#50 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex kn->active#50 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock crngs.lock irq_context: 0 sk_lock-AF_CAIF slock-AF_CAIF irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &((cluster_info + ci)->lock)#2 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock fs_reclaim irq_context: 0 (wq_completion)nfc3_nci_tx_wq#310 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &n->list_lock &c->lock 
irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &obj_hash[i].lock pool_lock irq_context: 0 nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock key irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock 
rcu_read_lock &tbl->lock init_task.mems_allowed_seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 kn->active#4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#4 rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &meta->lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock batched_entropy_u8.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &lock->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &rq->__lock irq_context: 0 sb_writers#7 stock_lock irq_context: 0 sb_writers#7 key irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 sb_writers#10 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 misc_mtx rfkill_global_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->forw_bcast_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->gw.list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.handler_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->nc.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#17 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#18 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_mayday_lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hn->hn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->dat.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &x->wait#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)inet_frag_wq irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex pool_lock#2 irq_context: 0 sb_writers#10 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#10 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#10 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &obj_hash[i].lock pool_lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &base->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 system_transition_mutex device_hotplug_lock tasklist_lock freezer_lock irq_context: 0 system_transition_mutex device_hotplug_lock &rq->__lock irq_context: 0 system_transition_mutex device_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hash->list_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->bla.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->mcast.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->tt.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 &bat_priv->softif_vlan_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#21 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&bat_priv->tt.req_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.roam_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->orig_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#19 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem quarantine_lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work sb_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 sk_lock-AF_VSOCK slock-AF_VSOCK &sk->sk_lock.wq irq_context: 0 slock-AF_VSOCK &sk->sk_lock.wq irq_context: 0 slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock irq_context: 0 slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &n->list_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss batched_entropy_u8.lock crngs.lock irq_context: 0 sk_lock-AF_CAIF &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&cb->timer) &rq_wait->wait irq_context: softirq (&cb->timer) &rq_wait->wait &p->pi_lock irq_context: softirq (&cb->timer) &rq_wait->wait &p->pi_lock &rq->__lock irq_context: softirq (&cb->timer) &rq_wait->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&lapb->t1timer) &lapb->lock &____s->seqcount#2 irq_context: softirq (&lapb->t1timer) &lapb->lock &pcp->lock &zone->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock stock_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock key irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pcpu_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &c->lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex nf_hook_mutex &n->list_lock irq_context: 0 rtnl_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &____s->seqcount#2 irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override 
pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &lock->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &p->alloc_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &c->lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock &meta->lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#590 irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock init_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &zone->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 cb_lock genl_mutex 
rtnl_mutex &wg->device_update_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 &journal->j_checkpoint_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal key irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tn->lock rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &p->lock &____s->seqcount#2 irq_context: 0 &p->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 
(wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex net_rwsem nl_table_lock irq_context: 0 rtnl_mutex net_rwsem nl_table_wait.lock irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dpm_list_mtx &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#590 irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#590 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#589 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#12 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#14 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex quarantine_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &c->lock 
irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_node_0 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &lock->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem nl_table_wait.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 system_transition_mutex device_hotplug_lock console_lock console_srcu console_owner_lock irq_context: 0 system_transition_mutex device_hotplug_lock console_lock console_srcu console_owner irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock 
rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock once_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 
&sbi->s_writepages_rwsem rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock 
rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &type->s_umount_key#46/1 &c->lock irq_context: 0 &type->s_umount_key#46/1 &____s->seqcount#2 irq_context: 0 &type->s_umount_key#46/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &rq->__lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &____s->seqcount#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &____s->seqcount#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &____s->seqcount irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &____s->seqcount#2 
irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) 
&hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &____s->seqcount irq_context: softirq rcu_read_lock &ifibss->incomplete_lock irq_context: softirq rcu_read_lock &rdev->wiphy_work_lock irq_context: softirq rcu_read_lock &local->rx_path_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &local->rx_path_lock pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &n->list_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 kn->active#48 &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &n->list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 kn->active#50 &c->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex &p->pi_lock irq_context: 0 kn->active#51 &c->lock irq_context: 0 kn->active#51 &n->list_lock 
irq_context: 0 kn->active#51 &n->list_lock &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &c->lock irq_context: 0 sb_writers#9 &of->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex &pcp->lock &zone->lock irq_context: 0 nf_hook_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pcpu_alloc_mutex fs_reclaim irq_context: 0 pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pcpu_alloc_mutex &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &retval->lock irq_context: 0 nf_nat_proto_mutex &c->lock irq_context: 0 nf_nat_proto_mutex &____s->seqcount#2 irq_context: 0 nf_nat_proto_mutex &____s->seqcount irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 &c->lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx quarantine_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dev->tx_global_lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sch->q.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &n->list_lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex free_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &c->lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 &ids->rwsem &obj_hash[i].lock pool_lock irq_context: 0 nf_nat_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override 
&____s->seqcount irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &____s->seqcount irq_context: softirq (&sdp->delay_work) irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem stock_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &lock->wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override pool_lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock stock_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 kn->active#52 &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex &c->lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#50 &n->list_lock irq_context: 0 kn->active#50 &n->list_lock &c->lock irq_context: 0 kn->active#50 &rq->__lock irq_context: 0 kn->active#50 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (gc_work).work 
&rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu irq_context: 0 &xt[i].mutex remove_cache_srcu quarantine_lock irq_context: 0 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &base->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu quarantine_lock 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 &sighand->siglock stock_lock irq_context: 0 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 sb_writers#4 &rq->__lock irq_context: 0 sb_writers#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle key#4 irq_context: 0 proto_tab_lock irq_context: 0 proto_tab_lock &c->lock irq_context: 0 proto_tab_lock pool_lock#2 irq_context: 0 proto_tab_lock &dir->lock irq_context: 0 proto_tab_lock &obj_hash[i].lock irq_context: 0 &group->mark_mutex &rq->__lock irq_context: 0 &group->mark_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 vsock_table_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#16 &p->pi_lock &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sched_map-wait-type-override &rq->__lock irq_context: 0 sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) irq_context: softirq (&peer->timer_persistent_keepalive) pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) &list->lock#14 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock 
&obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_VSOCK irq_context: 0 sk_lock-AF_VSOCK slock-AF_VSOCK irq_context: 0 sk_lock-AF_VSOCK &mm->mmap_lock irq_context: 0 slock-AF_VSOCK irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &____s->seqcount#2 irq_context: 0 misc_mtx nfc_index_ida.xa_lock irq_context: 0 misc_mtx pcpu_alloc_mutex irq_context: 0 misc_mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 misc_mtx cpu_hotplug_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 misc_mtx kthread_create_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: 0 misc_mtx &x->wait irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 tasklist_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx wq_pool_mutex irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex irq_context: 0 misc_mtx &n->list_lock irq_context: 0 misc_mtx &n->list_lock &c->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu &c->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &n->list_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &xt[i].mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &fsnotify_mark_srcu &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 &p->lock &mm->mmap_lock &rq->__lock irq_context: 0 &p->lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx nfc_devlist_mutex &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex 
pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &k->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &k->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem irq_context: 0 misc_mtx nfc_devlist_mutex sysfs_symlink_target_lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex &dev->power.lock irq_context: 0 misc_mtx nfc_devlist_mutex dpm_list_mtx irq_context: 0 misc_mtx nfc_devlist_mutex &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 &k->k_lock irq_context: 0 misc_mtx &k->k_lock irq_context: 0 misc_mtx llcp_devices_lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex &x->wait#9 irq_context: 0 misc_mtx &dev->mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &k->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex 
rfkill_global_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &dev->power.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rfkill->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &k->k_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nl_table_lock irq_context: 0 misc_mtx nl_table_wait.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &vma->vm_lock->lock 
mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK slock-AF_VSOCK irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK vsock_table_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_VSOCK clock-AF_VSOCK irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_VSOCK irq_context: 0 &sb->s_type->i_mutex_key#10 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_NFC irq_context: 0 &ndev->req_lock irq_context: 0 &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock &wq->mutex irq_context: 0 &ndev->req_lock &wq->mutex &pool->lock irq_context: 0 &ndev->req_lock &wq->mutex &x->wait#10 irq_context: 0 &ndev->req_lock (&ndev->cmd_timer) irq_context: 0 &ndev->req_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock &base->lock irq_context: 0 &ndev->req_lock &rq->__lock irq_context: 0 &ndev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (&ndev->data_timer) irq_context: 0 &ids->rwsem &____s->seqcount#2 irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &cfs_rq->removed.lock irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &dev->mutex &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 &dev->mutex kernfs_idr_lock irq_context: 0 &dev->mutex &k->k_lock klist_remove_lock irq_context: softirq (&ndev->rs_timer) irq_context: softirq (&ndev->rs_timer) &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) pool_lock#2 irq_context: softirq (&ndev->rs_timer) &dir->lock#2 irq_context: softirq (&ndev->rs_timer) &ul->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#11 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex subsys mutex#40 irq_context: 0 &dev->mutex subsys mutex#40 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &rfkill->lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->mutex triggers_list_lock irq_context: 0 &dev->mutex leds_list_lock irq_context: 0 &local->sockets.lock irq_context: 0 &local->raw_sockets.lock irq_context: 0 (&local->link_timer) irq_context: 0 (work_completion)(&local->tx_work) irq_context: 0 (work_completion)(&local->rx_work) irq_context: 0 (work_completion)(&local->timeout_work) irq_context: 0 (&local->sdreq_timer) irq_context: 0 (work_completion)(&local->sdreq_timeout_work) irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 
nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 nfc_devlist_mutex kernfs_idr_lock irq_context: 0 nfc_devlist_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 nfc_devlist_mutex subsys mutex#39 &k->k_lock klist_remove_lock irq_context: 0 nfc_devlist_mutex &x->wait#9 irq_context: 0 nfc_devlist_mutex deferred_probe_mutex irq_context: 0 nfc_devlist_mutex device_links_lock irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &____s->seqcount#2 irq_context: 0 &ids->rwsem &____s->seqcount irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) 
&ssp->srcu_sup->srcu_cb_mutex &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &rq->__lock irq_context: 0 sb_writers#3 sb_internal &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &ids->rwsem &rq->__lock irq_context: 0 &ids->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&dom->period_timer) &p->sequence key#14 irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#7 tomoyo_ss &n->list_lock &c->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &xa->xa_lock#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
jbd2_handle &xa->xa_lock#9 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle kfence_freelist_lock irq_context: 0 sb_writers#7 kn->active#4 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) &c->lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 system_transition_mutex device_hotplug_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock key#6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override 
&n->list_lock &c->lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock irq_context: softirq (&ndev->rs_timer) &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &n->list_lock irq_context: 0 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock 
rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock 
rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq 
&p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 krc.lock &obj_hash[i].lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 krc.lock &base->lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 
krc.lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock hwsim_radio_lock &n->list_lock irq_context: softirq rcu_read_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle key#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &lock->wait_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &n->list_lock irq_context: 0 &fsnotify_mark_srcu &n->list_lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal &n->list_lock irq_context: 0 sb_writers#3 sb_internal &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &mapping->i_private_lock irq_context: softirq &(&bat_priv->tt.work)->timer irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) key#16 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) key#21 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &bat_priv->tt.req_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &bat_priv->tt.roam_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ids->rwsem &pcp->lock &zone->lock irq_context: 0 &root->kernfs_iattr_rwsem stock_lock irq_context: 0 &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_iattr_rwsem key irq_context: 0 &root->kernfs_iattr_rwsem pcpu_lock irq_context: 0 
&root->kernfs_iattr_rwsem percpu_counters_lock irq_context: 0 &root->kernfs_iattr_rwsem pcpu_lock stock_lock irq_context: 0 &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 &xt[i].mutex &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock rcu_read_lock &zone->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 file_rwsem &rq->__lock irq_context: 0 file_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &c->lock irq_context: 0 sb_writers#7 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 &tsk->futex_exit_mutex &cfs_rq->removed.lock irq_context: 0 &tsk->futex_exit_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work pool_lock irq_context: 0 &ids->rwsem remove_cache_srcu irq_context: 0 &ids->rwsem remove_cache_srcu quarantine_lock irq_context: 0 &ids->rwsem remove_cache_srcu &c->lock irq_context: 0 &ids->rwsem remove_cache_srcu &n->list_lock irq_context: 0 &ids->rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &sk->sk_lock.wq irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&vma->vm_lock->lock &n->list_lock irq_context: 0 &vma->vm_lock->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override 
pool_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 system_transition_mutex device_hotplug_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_node_0 irq_context: 0 (wq_completion)events (shepherd).work &rq->__lock irq_context: 0 (wq_completion)events (shepherd).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &c->lock 
irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal &____s->seqcount#2 irq_context: 0 sb_writers#3 sb_internal &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_node_0 irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#7 kn->active#4 &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal &pcp->lock 
&zone->lock irq_context: 0 sb_writers#3 sb_internal &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->lock irq_context: 0 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex &____s->seqcount#2 irq_context: 0 &group->mark_mutex &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 
&rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock crngs.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu irq_context: 0 &mm->mmap_lock remove_cache_srcu quarantine_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &r->producer_lock#3 irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta->lock irq_context: 0 lock pidmap_lock &n->list_lock irq_context: 0 lock pidmap_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) &n->list_lock irq_context: softirq (&ndev->rs_timer) &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &____s->seqcount#2 irq_context: 0 &vma->vm_lock->lock &____s->seqcount#2 irq_context: 0 cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &bgl->locks[i].lock irq_context: 0 &ids->rwsem remove_cache_srcu &rq->__lock irq_context: 0 &ids->rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &ids->rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 kfence_freelist_lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 stock_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock pool_lock#2 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pcpu_lock stock_lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_callback stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_node_0 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &p->lock remove_cache_srcu pool_lock#2 irq_context: 0 &p->lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 nf_sockopt_mutex &rq->__lock irq_context: 0 nf_sockopt_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#14 irq_context: softirq (&ndev->rs_timer) rcu_read_lock 
rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &bgl->locks[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 quarantine_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 quarantine_lock irq_context: 0 kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &xt[i].mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &cfs_rq->removed.lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 &xt[i].mutex batched_entropy_u8.lock irq_context: 0 &xt[i].mutex kfence_freelist_lock irq_context: 0 &xt[i].mutex &meta->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem &n->list_lock irq_context: 0 &ids->rwsem &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &bgl->locks[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu pool_lock#2 irq_context: 0 lock pidmap_lock &____s->seqcount#2 irq_context: 0 lock pidmap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work 
&cfs_rq->removed.lock irq_context: 0 lock map_idr_lock irq_context: 0 lock map_idr_lock pool_lock#2 irq_context: 0 map_idr_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock.wait_lock irq_context: 0 sb_writers#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock pidmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 bdev_lock &rq->__lock 
irq_context: 0 bdev_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock smack_known_lock.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &c->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &n->list_lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &obj_hash[i].lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq 
&(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &cfs_rq->removed.lock irq_context: 0 &p->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock 
&obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 &xt[i].mutex stock_lock irq_context: 0 &xt[i].mutex key irq_context: 0 &xt[i].mutex pcpu_lock irq_context: 0 &xt[i].mutex percpu_counters_lock irq_context: 0 &xt[i].mutex pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&type->i_mutex_dir_key#3 jbd2_handle &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle bit_wait_table + i irq_context: 0 &ret->b_state_lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock 
&cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock irq_context: 0 &xt[i].mutex quarantine_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_INET fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &mm->mmap_lock stock_lock rcu_read_lock 
per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kcov->lock kcov_remote_lock &obj_hash[i].lock irq_context: 0 tasklist_lock rcu_read_lock &sighand->siglock irq_context: 0 &xt[i].mutex rcu_read_lock rcu_node_0 irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#22 &lru->node[i].lock irq_context: 0 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_internal irq_context: 0 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_internal jbd2_handle &ei->i_es_lock irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle 
&ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 irq_context: 0 &type->s_umount_key#47 &x->wait#23 irq_context: 0 &type->s_umount_key#47 shrinker_mutex irq_context: 0 &type->s_umount_key#47 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#47 pool_lock#2 irq_context: 0 &type->s_umount_key#47 rename_lock.seqcount irq_context: 0 &type->s_umount_key#47 &dentry->d_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#47 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#47 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#47 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#47 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#47 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex binderfs_minors.xa_lock irq_context: 0 &mq_lock irq_context: 0 kcov_remote_lock irq_context: 0 (wq_completion)events free_ipc_work irq_context: 0 (wq_completion)events free_ipc_work &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &x->wait#2 irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss stock_lock irq_context: 0 tomoyo_ss key irq_context: 0 tomoyo_ss pcpu_lock irq_context: 0 tomoyo_ss percpu_counters_lock irq_context: 0 tomoyo_ss pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock mount_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work mount_lock irq_context: 0 (wq_completion)events free_ipc_work mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)events 
free_ipc_work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &x->wait#23 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 shrinker_mutex irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 rename_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &sb->s_type->i_lock_key#19 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &s->s_inode_list_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &xa->xa_lock#9 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &fsnotify_mark_srcu irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &dentry->d_lock/1 irq_context: 0 (wq_completion)events free_ipc_work sb_lock irq_context: 0 (wq_completion)events free_ipc_work unnamed_dev_ida.xa_lock irq_context: 0 (wq_completion)events free_ipc_work list_lrus_mutex irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#5 irq_context: 0 (wq_completion)events free_ipc_work pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work sb_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sb_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work mnt_id_ida.xa_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 stock_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work percpu_counters_lock irq_context: 0 (wq_completion)events free_ipc_work pcpu_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock sysctl_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)events free_ipc_work &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)events free_ipc_work &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 
(wq_completion)events free_ipc_work &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work rename_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &s->s_inode_list_lock irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#9 irq_context: 0 (wq_completion)events free_ipc_work proc_inum_ida.xa_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &folio_wait_table[i] irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &folio_wait_table[i] &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &base->lock &obj_hash[i].lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: softirq (&peer->timer_retransmit_handshake) irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock key irq_context: 0 map_idr_lock &obj_hash[i].lock irq_context: 
0 map_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) stock_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) fill_pool_map-wait-type-override pool_lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_VSOCK fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_VSOCK fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx 
&sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) pool_lock#2 irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &obj_hash[i].lock pool_lock irq_context: 0 lock map_idr_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 (work_completion)(&data->suspend_work) irq_context: 0 &hdev->unregister_lock irq_context: 0 hci_dev_list_lock irq_context: 0 (work_completion)(&hdev->power_on) irq_context: 0 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (work_completion)(&hdev->reenable_adv_work) irq_context: 0 &hdev->cmd_sync_work_lock irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->interleave_scan)->work) irq_context: 0 &hdev->req_lock hci_dev_list_lock irq_context: 0 &hdev->req_lock (work_completion)(&hdev->tx_work) irq_context: 0 &hdev->req_lock (work_completion)(&hdev->rx_work) irq_context: 0 &hdev->req_lock &wq->mutex irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_node_0 irq_context: 0 &hdev->req_lock &wq->mutex &pool->lock irq_context: 0 &hdev->req_lock &wq->mutex &x->wait#10 irq_context: 0 &hdev->req_lock &hdev->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#2 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh _xmit_X25#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock 
pcpu_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_CAIF &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 rtnl_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex rcu_read_lock console_owner irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex fs_reclaim stock_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock &rq->__lock irq_context: 0 &ctx->uring_lock &n->list_lock irq_context: 0 &ctx->uring_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex key#23 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem console_owner irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &base->lock irq_context: 0 &hdev->req_lock &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock &x->wait#2 irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &____s->seqcount irq_context: 0 
&mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &xa->xa_lock#9 batched_entropy_u8.lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex pcpu_lock stock_lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock quarantine_lock irq_context: 0 &xa->xa_lock#9 kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem 
irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 &hdev->req_lock &hdev->lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 &hdev->req_lock &hdev->lock batched_entropy_u8.lock irq_context: 0 &hdev->req_lock &hdev->lock kfence_freelist_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &c->lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &meta->lock irq_context: 0 udc_lock rcu_node_0 irq_context: 0 udc_lock &rcu_state.expedited_wq irq_context: 0 udc_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 udc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 udc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 vmap_purge_lock free_vmap_area_lock quarantine_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
rtnl_mutex dev_addr_sem net_rwsem rcu_node_0 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &hashinfo->lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &ul->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock batched_entropy_u32.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override 
&n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock quarantine_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock batched_entropy_u32.lock crngs.lock irq_context: 0 sk_lock-AF_NFC irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex &rq->__lock irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem &____s->seqcount#2 irq_context: 0 sb_writers#4 remove_cache_srcu irq_context: 0 sb_writers#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock 
hci_cb_list_lock.wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#172 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 &obj_hash[i].lock pool_lock irq_context: 0 &ep->mtx remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock_bh _xmit_X25#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 quarantine_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock tk_core.seq.seqcount irq_context: 0 &hdev->req_lock hci_sk_list.lock irq_context: 0 &hdev->req_lock &list->lock#7 irq_context: 0 &hdev->req_lock (work_completion)(&hdev->cmd_work) irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 &hdev->lock irq_context: 0 &hdev->lock fs_reclaim irq_context: 0 &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &hdev->lock pool_lock#2 irq_context: 0 &hdev->lock tk_core.seq.seqcount irq_context: 0 &hdev->lock hci_sk_list.lock irq_context: 0 &hdev->lock &obj_hash[i].lock irq_context: 0 hci_sk_list.lock irq_context: 0 (work_completion)(&rfkill->uevent_work) irq_context: 0 (work_completion)(&rfkill->sync_work) irq_context: 0 subsys mutex#40 irq_context: 0 subsys mutex#40 &k->k_lock irq_context: 0 subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 &rfkill->lock irq_context: 0 uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 uevent_sock_mutex nl_table_lock irq_context: 0 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock irq_context: 0 rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 subsys mutex#74 irq_context: 0 subsys mutex#74 &k->k_lock irq_context: 0 subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 &xa->xa_lock#17 irq_context: 0 hci_index_ida.xa_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &tun->lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_NETROM irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock irq_context: 0 rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: 0 rtnl_mutex __ip_vs_mutex irq_context: 0 rtnl_mutex __ip_vs_mutex &ipvs->dest_trash_lock irq_context: 0 rtnl_mutex _xmit_ETHER &obj_hash[i].lock 
irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock irq_context: 0 rtnl_mutex flowtable_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 rtnl_mutex &ul->lock#2 irq_context: 0 rtnl_mutex &tbl->lock &c->lock irq_context: 0 rtnl_mutex &tbl->lock &n->lock irq_context: 0 rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 rtnl_mutex &tbl->lock krc.lock irq_context: 0 rtnl_mutex &net->ipv6.addrconf_hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &x->wait#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback proc_inum_ida.xa_lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock pool_lock#2 irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &table->hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &table->hash[i].lock &table->hash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &sb->s_type->i_lock_key#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock 
cpu_asid_lock irq_context: 0 tomoyo_ss remove_cache_srcu &meta->lock irq_context: 0 tomoyo_ss remove_cache_srcu kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &tn->node_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ebt_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_ct_ecache_mutex irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &rq->__lock irq_context: 0 (wq_completion)events 
(work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&local->dynamic_ps_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &list->lock#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->dev_wait irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_base_lock &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx bpf_devs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &in_dev->mc_tomb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx class irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->xdp.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mirred_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tn->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ndev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_query_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx 
&idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_report_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->pndevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->routes.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnettable->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx smc_ib_devices.mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx target_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tn->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&link->color_collision_detect_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->k_lock klist_remove_lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq_wait->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->mgmt_registrations_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &zone->lock &____s->seqcount irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#4 irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex.wait_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock irq_context: softirq (&in_dev->mr_ifc_timer) batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&local->restart_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &list->lock#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rdev->wiphy.mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rfkill->uevent_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rfkill->sync_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem pool_lock#2 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dpm_list_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem device_links_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 kfence_freelist_lock irq_context: 0 rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rfkill->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem triggers_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem leds_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->conn_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->event_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&rdev->dfs_update_channels_wk)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&rdev->background_cac_done_wk)->work) irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->destroy_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->propagate_radar_detect_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->propagate_cac_done_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->mgmt_registrations_update_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->background_cac_abort_wk) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex &tb->tb6_lock quarantine_lock irq_context: 0 krc.lock &obj_hash[i].lock irq_context: 0 krc.lock &base->lock irq_context: 0 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex pool_lock#2 irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 rtnl_mutex bus_type_sem &rq->__lock irq_context: 0 rtnl_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&local->sta_cleanup) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &meta->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) batched_entropy_u8.lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex lweventlist_lock &____s->seqcount#2 irq_context: 0 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 
rtnl_mutex wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &cfs_rq->removed.lock irq_context: softirq rcu_read_lock &br->multicast_lock &n->list_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 &xt[i].mutex rcu_node_0 irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock hwsim_radio_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock &br->multicast_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: softirq rcu_read_lock &br->hash_lock &n->list_lock irq_context: softirq rcu_read_lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 &c->lock irq_context: 0 rcu_read_lock stock_lock irq_context: 0 rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock rcu_node_0 irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_CAIF &obj_hash[i].lock irq_context: 0 ebt_mutex stock_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &bgl->locks[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_node_0 irq_context: 0 &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &n->list_lock irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock cpu_asid_lock irq_context: 0 &ids->rwsem 
batched_entropy_u8.lock irq_context: 0 &ids->rwsem kfence_freelist_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle pool_lock#2 irq_context: 0 ebt_mutex pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem rcu_node_0 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->dat.work)->timer irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->bla.work)->timer irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &hash->list_locks[i] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) key#20 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &entry->crc_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle quarantine_lock irq_context: softirq (&ndev->rs_timer) kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 
pernet_ops_rwsem k-sk_lock-AF_NETLINK &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock key irq_context: 0 rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 sk_lock-AF_NFC slock-AF_NFC irq_context: 0 sk_lock-AF_NFC &k->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &meta->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 ebt_mutex percpu_counters_lock irq_context: 0 ebt_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex kfence_freelist_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &xa->xa_lock#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock _xmit_NONE#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nn->netlink_tap_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nn->netlink_tap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sem->wait_lock irq_context: 0 sb_writers#3 &p->pi_lock irq_context: 0 sb_writers#3 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &c->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets_rwsem rdma_nets.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem devices_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock irq_context: 0 sk_lock-AF_INET fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &nlk->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets.xa_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 rcu_state.exp_mutex.wait_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &obj_hash[i].lock 
irq_context: 0 &p->lock &of->mutex kn->active#4 &p->pi_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rcu_state.expedited_wq irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &p->lock &of->mutex kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NFC &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] batched_entropy_u8.lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 key#25 irq_context: 0 slock-AF_CAIF irq_context: 0 &sb->s_type->i_mutex_key#10 rlock-AF_CAIF irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#172 &devlink_port->type_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#172 rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &tn->lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &xa->xa_lock#18 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &app->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->join_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->periodic_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &list->lock#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->join_timer)#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &app->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &list->lock#11 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->scan_result)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->connect)->work) irq_context: 0 &disk->open_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&hsr->prune_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&hsr->announce_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events free_ipc_work &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ids->rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_node_0 irq_context: 0 &fsnotify_mark_srcu &____s->seqcount#2 irq_context: 0 &fsnotify_mark_srcu &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem netns_bpf_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &sem->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 sk_lock-AF_NFC llcp_devices_lock irq_context: 0 sk_lock-AF_NFC llcp_devices_lock &k->list_lock irq_context: 0 sk_lock-AF_NFC llcp_devices_lock &k->k_lock irq_context: 0 slock-AF_NFC irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#172 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#295 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &pcp->lock &zone->lock irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu quarantine_lock irq_context: 0 &dev->mutex kfence_freelist_lock irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->fs_probe_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->cells_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->cells_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem bit_wait_table + i irq_context: 0 (wq_completion)afs irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &net->cells_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->fs_timer) irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) 
irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &(&net->fs_lock)->lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->incoming_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &call->waitq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock &call->notify_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC (rxrpc_call_limiter).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->recvmsg_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->call_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->call_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC (&call->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &list->lock#17 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)kafsd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &local->services_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)krxrpcd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rlock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait irq_context: 0 &rxnet->local_mutex irq_context: 0 (&local->client_conn_reap_timer) irq_context: 0 &rxnet->conn_lock irq_context: 0 &table->hash[i].lock irq_context: 0 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 k-clock-AF_INET6 irq_context: 0 &list->lock#18 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xa->xa_lock#9 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex (work_completion)(&data->gc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex 
pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ovs_net->masks_rebalance)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&ovs_net->dp_notify_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &srv->idr_lock irq_context: softirq rcu_callback rlock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &nt->cluster_scope_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC 
k-clock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ptype_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 rtnl_mutex &rnp->exp_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&tn->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &tn->nametbl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&c->work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)krdsd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rtn->rds_tcp_accept_w) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &icsk->icsk_accept_queue.rskq_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rds_tcp_conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem loop_conns_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)l2tp irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &idev->mc_lock 
&macvlan_netdev_addr_lock_key &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&rxnet->peer_keepalive_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rxnet->peer_keepalive_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&rxnet->service_conn_reap_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#10 irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->service_conn_reaper) irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->service_conn_reaper) &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pn->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_NONE#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sch->q.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &ipvs->dest_trash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &im->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex class irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex flowtable_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &____s->seqcount#2 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
dev_base_lock &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &in_dev->mc_tomb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem class irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mirred_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_report_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->routes.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex target_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_NONE irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] rcu_node_0 irq_context: 0 &kernfs_locks->open_file_mutex[count] &rcu_state.expedited_wq irq_context: 0 &kernfs_locks->open_file_mutex[count] &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem netdev_unregistering_wq.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock nsim_bus_dev_ids.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_TUNNEL6#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex 
kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle key#4 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock subsys mutex#76 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->k_lock klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pool_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &c->lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &n->list_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &n->list_lock &c->lock irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hsr->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->tt.commit_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex ptype_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &entry->crc_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex key#19 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->forw_bcast_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&forw_packet_aggr->delayed_work)->work) irq_context: 0 &ids->rwsem &rq->__lock irq_context: 0 &ids->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kcov->lock kcov_remote_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 bt_proto_lock &c->lock irq_context: 0 bt_proto_lock &n->list_lock irq_context: 0 sb_writers#4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL6 irq_context: 0 bt_proto_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->tt.changes_list_lock irq_context: 0 &hdev->req_lock &hdev->lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &c->lock irq_context: 0 &hdev->lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[2] irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &c->lock 
irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pcpu_lock stock_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &ctrl->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#6 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_node_0 irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock &meta->lock irq_context: 0 ebt_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock kfence_freelist_lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock lock#4 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#172 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex &tn->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#677 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &br->lock console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &br->multicast_lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&p->rexmit_timer) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&p->timer) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) pool_lock#2 irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock irq_context: softirq (&mp->timer) irq_context: softirq (&mp->timer) &br->multicast_lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu rcu_node_0 irq_context: 0 &fsnotify_mark_srcu &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&mp->timer) irq_context: 0 cb_lock genl_mutex rtnl_mutex &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&pmctx->ip6_mc_router_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&pmctx->ip4_mc_router_timer) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem percpu_counters_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock irq_context: 0 fs_reclaim key irq_context: 0 fs_reclaim pcpu_lock irq_context: 0 fs_reclaim percpu_counters_lock irq_context: 0 fs_reclaim pcpu_lock stock_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock quarantine_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 
0 rtnl_mutex &tn->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &acomp_ctx->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_node_0 irq_context: 0 misc_mtx system_transition_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_SIT#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) rcu_node_0 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &tree->lock irq_context: 0 misc_mtx system_transition_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &nf_conntrack_locks[i] irq_context: 0 misc_mtx system_transition_mutex kfence_freelist_lock irq_context: 0 system_transition_mutex &meta->lock irq_context: 0 system_transition_mutex kfence_freelist_lock irq_context: 0 system_transition_mutex device_hotplug_lock console_owner_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF slock-AF_CAIF irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &this->info_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF (console_sem).lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 &zspage->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1100 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock 
&cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_SIT irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &pcp->lock &zone->lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#159 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_owner 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1100 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) tk_core.seq.seqcount irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &disk->open_mutex uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock quarantine_lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &cfs_rq->removed.lock irq_context: softirq &tx->clean_lock &base->lock irq_context: softirq &tx->clean_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pcp->lock &zone->lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &c->lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1098 irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &meta->lock irq_context: 0 kn->active#10 &____s->seqcount#2 irq_context: 0 kn->active#10 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_TUNNEL#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 kn->active#10 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#10 &____s->seqcount irq_context: 0 cb_lock genl_mutex rcu_read_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#3 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &____s->seqcount#2 irq_context: softirq (&peer->timer_send_keepalive) irq_context: softirq (&peer->timer_send_keepalive) &c->lock irq_context: softirq (&peer->timer_send_keepalive) pool_lock#2 irq_context: softirq (&peer->timer_send_keepalive) &list->lock#14 irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock batched_entropy_u8.lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 &ep->mtx rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)rcu_gp &rq->__lock irq_context: 0 (wq_completion)rcu_gp &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &p->lock &of->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#46 nsim_bus_dev_list_lock &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock deferred_probe_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_NFC &rq->__lock irq_context: 0 sk_lock-AF_NFC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NFC &dev->mutex &rq->__lock irq_context: 0 sk_lock-AF_NFC &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock pool_lock#2 irq_context: 0 raw_lock irq_context: 0 sk_lock-AF_IEEE802154 irq_context: 0 sk_lock-AF_IEEE802154 slock-AF_IEEE802154 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex devnet_rename_sem percpu_counters_lock irq_context: 0 slock-AF_IEEE802154 irq_context: 0 &sb->s_type->i_mutex_key#10 raw_lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_IEEE802154 irq_context: 0 fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 key#24 irq_context: 0 &ctx->uring_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &meta->lock irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock pool_lock#2 irq_context: 0 sk_lock-AF_IEEE802154 rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_IEEE802154 rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_IEEE802154 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock pool_lock#2 irq_context: softirq &peer->endpoint_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &base->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_wq[3] irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 sb_writers#10 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#10 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#10 tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx 
&sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 sb_writers#10 tomoyo_ss quarantine_lock irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF percpu_counters_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &zone->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &nbd->config_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &nbd->config_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 &xa->xa_lock#23 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&mp->timer) &br->multicast_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock quarantine_lock irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_IEEE802154 &rq->__lock irq_context: 0 sk_lock-AF_IEEE802154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1101 irq_context: 0 rds_sock_lock irq_context: 0 rds_cong_monitor_lock irq_context: 0 rds_cong_lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_RDS irq_context: 0 &sb->s_type->i_mutex_key#10 &rs->rs_recv_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_cong_monitor_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_cong_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rs->rs_lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rs->rs_rdma_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &q->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_sock_lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &meta->lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#21/1 &xa->xa_lock#5 irq_context: 0 &type->s_umount_key#21/1 &xa->xa_lock#5 pool_lock#2 irq_context: 0 &type->s_umount_key#21/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#21/1 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 &ei->xattr_sem irq_context: 0 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &mapping->i_mmap_rwsem irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &journal->j_state_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_raw_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 
mapping.invalidate_lock jbd2_handle tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_wait_updates irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 &sb->s_type->i_mutex_key#8 &sb->s_type->i_lock_key#22 irq_context: 0 &sb->s_type->i_mutex_key#8 &wb->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 fs_reclaim irq_context: 0 sb_writers#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 rcu_read_lock &____s->seqcount#4 irq_context: 0 sb_writers#3 &zone->lock irq_context: 0 sb_writers#3 &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock irq_context: 0 sb_writers#3 &p->alloc_lock irq_context: 0 sb_writers#3 rcu_read_lock &____s->seqcount#3 irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 stock_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &f->f_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &x->wait#2 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &n->list_lock &c->lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &data->open_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pool_lock#2 irq_context: 0 rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &p->lock &of->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#4 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: softirq rcu_callback &n->list_lock irq_context: softirq rcu_callback &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1101 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq 
&p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &list->lock#14 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu rcu_node_0 irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex raw_notifier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bcm_notifier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex isotp_notifier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex fib_info_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->stats_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &lock->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 pool_lock#2 irq_context: 0 file_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex req_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#11 irq_context: 0 &type->i_mutex_dir_key/1 irq_context: 0 &type->i_mutex_dir_key/1 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#75 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#75 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#75 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tap_major->minor_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem 
batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&port->bc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &ipvlan->addrs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&port->wq) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) &n->list_lock irq_context: softirq (&peer->timer_send_keepalive) &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: softirq &(&fw_cache.work)->timer irq_context: softirq &(&fw_cache.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&fw_cache.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&fw_cache.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&fw_cache.work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&fw_cache.work)->work) &fw_cache.name_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1101 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_node_0 irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1099 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_mayday_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &x->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &x->wait#24 irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1099 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 ebt_mutex &rq->__lock irq_context: 0 ebt_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&slave->notify_work)->work) irq_context: 0 &root->kernfs_rwsem &p->pi_lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem remove_cache_srcu pool_lock#2 irq_context: 0 &ids->rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->ipsec_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex crngs.lock irq_context: 0 &type->i_mutex_dir_key#5 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->notification_waitq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal batched_entropy_u8.lock irq_context: 0 sb_writers#3 sb_internal kfence_freelist_lock irq_context: 0 sb_writers#3 sb_internal &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_NETROM#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &cfs_rq->removed.lock irq_context: 0 loop_validate_mutex &lo->lo_mutex 
&rq->__lock irq_context: 0 loop_validate_mutex &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex &lock->wait_lock irq_context: 0 &lo->lo_mutex &rq->__lock irq_context: 0 loop_validate_mutex &lock->wait_lock irq_context: 0 loop_validate_mutex &p->pi_lock irq_context: 0 loop_validate_mutex &p->pi_lock &rq->__lock irq_context: 0 &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 loop_validate_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1099 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#660 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#660 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &c->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#677 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_NFC fs_reclaim irq_context: 0 sk_lock-AF_NFC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NFC pool_lock#2 irq_context: 0 sk_lock-AF_NFC &local->sdp_lock irq_context: 0 sk_lock-AF_NFC &local->sdp_lock &local->sockets.lock irq_context: 0 sk_lock-AF_NFC &local->sockets.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_NFC &local->sockets.lock 
irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex devnet_rename_sem pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#159 &devlink_port->type_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[0] irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &x->wait#2 irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock rcu_read_lock &rq->__lock irq_context: 0 &ctx->uring_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem batched_entropy_u8.lock irq_context: 0 rtnl_mutex dev_addr_sem kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem &meta->lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 &ep->mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 nf_sockopt_mutex rcu_read_lock &rq->__lock irq_context: 0 nf_sockopt_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 link_idr_lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pcpu_lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 misc_mtx rfkill_global_mutex &____s->seqcount#2 irq_context: 0 misc_mtx rfkill_global_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu rcu_node_0 irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem percpu_counters_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx rcu_read_lock rcu_node_0 irq_context: 0 &ep->mtx rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &____s->seqcount irq_context: 0 
&sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_CAIF &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx quarantine_lock irq_context: 0 rose_node_list_lock irq_context: 0 rose_node_list_lock rose_neigh_list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 rtnl_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_node_0 irq_context: 0 
&xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->w) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &n->list_lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work 
fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 cb_lock quarantine_lock irq_context: 0 cb_lock remove_cache_srcu irq_context: 0 cb_lock remove_cache_srcu quarantine_lock irq_context: 0 cb_lock remove_cache_srcu &c->lock irq_context: 0 cb_lock remove_cache_srcu &n->list_lock irq_context: 0 cb_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock rcu_node_0 irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) rcu_node_0 irq_context: 0 
(wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim pool_lock#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work 
fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &c->lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount 
irq_context: 0 &ep->mtx remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 cb_lock genl_mutex fs_reclaim rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu key irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &c->lock batched_entropy_u8.lock irq_context: 0 &nbd->config_lock &lock->wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 system_transition_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex remove_cache_srcu &base->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock 
&obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex bdev_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 &sighand->siglock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu percpu_counters_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock key irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle 
&rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->s_umount_key#46/1 &n->list_lock irq_context: 0 &type->s_umount_key#46/1 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &____s->seqcount#2 irq_context: 0 sb_writers#8 &c->lock irq_context: 0 sb_writers#8 &n->list_lock irq_context: 0 sb_writers#8 &n->list_lock &c->lock irq_context: 0 sb_writers#8 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)rcu_gp &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp &obj_hash[i].lock irq_context: softirq (&peer->timer_persistent_keepalive) &n->list_lock irq_context: softirq (&peer->timer_persistent_keepalive) &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 rcu_read_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#8 irq_context: softirq (&n->timer) irq_context: softirq (&n->timer) &n->lock irq_context: softirq (&n->timer) &n->lock &obj_hash[i].lock irq_context: softirq (&n->timer) &n->lock &base->lock irq_context: softirq (&n->timer) &n->lock &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_node_0 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 pool_lock#2 irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock &c->lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock smack_known_lock.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock.wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) pool_lock#2 irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &n->list_lock &c->lock irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex gdp_mutex &c->lock irq_context: 0 rtnl_mutex gdp_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex gdp_mutex &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sem->wait_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem tcp_metrics_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&net->xfrm.policy_hash_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->xfrm.xfrm_policy_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&net->xfrm.state_hash_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem genl_sk_destructing_waitq.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex.wait_lock irq_context: 0 
(wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (work_completion)(&(&ctx->fallback_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &meta->lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (work_completion)(&(&ctx->fallback_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem fs_reclaim &rq->__lock 
irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#5 kfence_freelist_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 &group->mark_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 
rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 rtnl_mutex _xmit_ETHER console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &ids->rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &ids->rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &____s->seqcount#2 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &pernet->lock irq_context: 0 cb_lock &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#14 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#14 &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &c->lock kfence_freelist_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &sem->wait_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&sbi->s_writepages_rwsem remove_cache_srcu irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu quarantine_lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &c->lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK &rq->__lock irq_context: 0 sk_lock-AF_NETLINK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &sem->wait_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &sem->wait_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rq->__lock irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ul->lock#2 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key#5 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER 
&n->list_lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ei->i_data_sem &rq->__lock irq_context: 0 &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock stock_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock key irq_context: 0 &sig->cred_guard_mutex rcu_read_lock pcpu_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_read_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1105 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1106 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 system_transition_mutex device_hotplug_lock console_owner irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &base->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex 
&dev_addr_list_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 &knet->mutex irq_context: 0 &ep->mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mux->lock irq_context: 0 rtnl_mutex net_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex net_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mux->rx_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_KCM irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_KCM slock-AF_KCM irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &rq->__lock irq_context: 0 rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &rq->__lock &base->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: 0 &sig->cred_guard_mutex 
&ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_read_lock &pool->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu &meta->lock irq_context: 0 remove_cache_srcu kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &xt[i].mutex &base->lock irq_context: 0 &xt[i].mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &meta->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ep->mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal batched_entropy_u8.lock crngs.lock irq_context: 0 &root->kernfs_rwsem rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock krc.lock &base->lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock console_owner_lock irq_context: 0 cb_lock console_owner irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &bgl->locks[i].lock rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &bgl->locks[i].lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &bgl->locks[i].lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 sb_writers#7 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &n->list_lock &c->lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &obj_hash[i].lock pool_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &n->list_lock &c->lock irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex.wait_lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[3] irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex quarantine_lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex &rq->__lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex 
rcu_read_lock &rcu_state.gp_wq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &sqd->lock irq_context: 0 &ctx->uring_lock irq_context: 0 &ctx->uring_lock fs_reclaim irq_context: 0 &ctx->uring_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ctx->uring_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &ctx->uring_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock remove_cache_srcu irq_context: 0 &ctx->uring_lock remove_cache_srcu quarantine_lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &c->lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &n->list_lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &rq->__lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &ctx->uring_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ctx->uring_lock pool_lock#2 irq_context: 0 &xa->xa_lock#20 irq_context: 0 &xa->xa_lock#20 pool_lock#2 irq_context: 0 &xa->xa_lock#20 &c->lock irq_context: 0 &sqd->lock &sqd->wait irq_context: 0 &sqd->lock &rq->__lock irq_context: 0 &sqd->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 &ctx->uring_lock &rq->__lock irq_context: 0 &ctx->uring_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock &table->hbs[i].lock irq_context: 0 &ctx->completion_lock irq_context: 0 &ctx->completion_lock &ctx->timeout_lock irq_context: 0 &(&ctx->fallback_work)->timer irq_context: 0 (work_completion)(&(&ctx->fallback_work)->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &acct->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &wq->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->completion_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &table->hbs[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->completion_lock &ctx->timeout_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&ctx->exit_work) &sqd->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &acct->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &wq->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &ctx->completion_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &x->wait#28 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#20 &obj_hash[i].lock irq_context: 0 &x->wait#29 irq_context: 0 &hash->wait irq_context: 0 &acct->lock irq_context: 0 &sqd->lock key#26 irq_context: 0 &sqd->lock &x->wait#29 irq_context: 0 &sqd->lock &hash->wait irq_context: 0 &sqd->lock cpu_hotplug_lock irq_context: 0 &sqd->lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 &sqd->lock &acct->lock irq_context: 0 &sqd->lock &obj_hash[i].lock irq_context: 0 &sqd->lock pool_lock#2 irq_context: 0 &sqd->lock percpu_counters_lock irq_context: 0 &sqd->lock pcpu_lock irq_context: 0 &x->wait#30 irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 &p->pi_lock irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) (&timer.timer) irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &x->wait#30 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) key irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) percpu_counters_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) pcpu_lock stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) percpu_ref_switch_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &xa->xa_lock#21 irq_context: softirq rcu_callback put_task_map-wait-type-override percpu_counters_lock irq_context: softirq rcu_callback put_task_map-wait-type-override pcpu_lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#20 &n->list_lock irq_context: 0 &xa->xa_lock#20 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#20 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &cfs_rq->removed.lock irq_context: 0 &sqd->lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) pool_lock irq_context: 0 &ctx->uring_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &xa->xa_lock#20 &____s->seqcount#2 irq_context: 0 &xa->xa_lock#20 &pcp->lock &zone->lock irq_context: 0 &xa->xa_lock#20 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &xa->xa_lock#20 &____s->seqcount irq_context: 0 &ctx->uring_lock &obj_hash[i].lock irq_context: 0 &ctx->uring_lock &lock->wait_lock irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock cpu_asid_lock irq_context: 0 &ctx->uring_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback rcu_read_lock &x->wait#28 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#13 irq_context: 0 sb_writers#13 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&ctx->exit_work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) quarantine_lock irq_context: 0 pcpu_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock quarantine_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#13 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem pool_lock#2 irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET6 &cfs_rq->removed.lock irq_context: 0 ebt_mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex &mm->mmap_lock &obj_hash[i].lock irq_context: 0 ebt_mutex &mm->mmap_lock pool_lock#2 irq_context: 0 ebt_mutex &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_VSOCK fs_reclaim irq_context: 0 sk_lock-AF_VSOCK fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_VSOCK pool_lock#2 irq_context: 0 sk_lock-AF_VSOCK vsock_table_lock irq_context: 0 sk_lock-AF_VSOCK vsock_table_lock batched_entropy_u32.lock irq_context: 0 sk_lock-AF_VSOCK &vvs->rx_lock irq_context: 0 sk_lock-AF_VSOCK &obj_hash[i].lock irq_context: 0 sk_lock-AF_VSOCK &c->lock irq_context: 0 sk_lock-AF_VSOCK &n->list_lock irq_context: 0 sk_lock-AF_VSOCK &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &cfs_rq->removed.lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) irq_context: 0 misc_mtx rfkill_global_mutex &n->list_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex irq_context: 0 misc_mtx rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &rq->__lock irq_context: 0 wq_pool_attach_mutex wq_pool_attach_mutex.wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex.wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_attach_mutex.wait_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim &obj_hash[i].lock pool_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock cpu_asid_lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 pcpu_alloc_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock irq_context: 0 &ids->rwsem lock rcu_read_lock 
&new->lock#2 &____s->seqcount#2 irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &pcp->lock &zone->lock irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ids->rwsem lock rcu_read_lock &new->lock#2 &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &child->perf_event_mutex &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mount_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &child->perf_event_mutex &rq->__lock irq_context: 0 &child->perf_event_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem stock_lock irq_context: 0 cgroup_threadgroup_rwsem pcpu_lock stock_lock irq_context: 0 nf_nat_proto_mutex &n->list_lock irq_context: 0 nf_nat_proto_mutex &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &cfs_rq->removed.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 &child->perf_event_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &meta->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &rnp->exp_lock irq_context: 0 rcu_state.exp_mutex irq_context: 0 rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[1] irq_context: 0 misc_mtx &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mount_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &c->lock irq_context: 0 sb_writers#7 &of->mutex 
kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &ep->mtx &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock 
irq_context: 0 &sb->s_type->i_mutex_key#10 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock key irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: softirq rcu_callback put_task_map-wait-type-override per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock key irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock pcpu_lock irq_context: 0 tomoyo_ss rcu_read_lock percpu_counters_lock irq_context: 0 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &base->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &meta->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_node_0 irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex remove_cache_srcu &rq->__lock irq_context: 0 &group->mark_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 smack_known_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &c->lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &fsnotify_mark_srcu irq_context: 0 &type->i_mutex_dir_key/1 &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key/1 &sbinfo->stat_lock irq_context: 0 &type->i_mutex_dir_key/1 &xa->xa_lock#9 irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim irq_context: 0 sb_writers#3 
&mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock &n->list_lock irq_context: softirq (&lapb->t1timer) &lapb->lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock pool_lock#2 irq_context: 0 &u->iolock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rq->__lock cpu_asid_lock irq_context: 0 system_transition_mutex &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_node_0 irq_context: 0 system_transition_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) kfence_freelist_lock irq_context: 0 sb_writers#7 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pool_lock#2 irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 &u->iolock &mm->mmap_lock &rq->__lock irq_context: 0 &u->iolock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock 
&rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 kfence_freelist_lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem (&timer.timer) irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fw_lock &x->wait#22 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) 
umhelper_sem kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem deferred_probe_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem device_links_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fw_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_indoor_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex krc.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_pending_beacons_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex quarantine_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 kfence_freelist_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 &ep->mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 pcpu_alloc_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock fs_reclaim pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rcu_read_lock &sighand->siglock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock 
stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) 
&ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock smack_known_lock.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &cfs_rq->removed.lock irq_context: 0 &p->lock fs_reclaim &rq->__lock irq_context: 0 &p->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock quarantine_lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock quarantine_lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq net/wireless/reg.c:533 irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_indoor_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex krc.lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_pending_beacons_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex 
&rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_KCM clock-AF_KCM irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_KCM irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) pool_lock#2 irq_context: 0 key#22 irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_node_0 irq_context: 0 sb_writers#7 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock key irq_context: 0 rcu_read_lock &vma->vm_lock->lock pcpu_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#7 &cfs_rq->removed.lock 
irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss pool_lock#2 irq_context: 0 rcu_read_lock &sighand->siglock &n->list_lock irq_context: 0 rcu_read_lock &sighand->siglock &n->list_lock &c->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &n->list_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx key irq_context: 0 misc_mtx pcpu_lock irq_context: 0 misc_mtx percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &rq->__lock &obj_hash[i].lock irq_context: 0 &rq->__lock &base->lock irq_context: 0 &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 &base->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 
stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 pcpu_lock stock_lock irq_context: 0 sb_writers#3 &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle bit_wait_table + i irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem cpu_add_remove_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem cpu_add_remove_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &pl->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &pl->lock key#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#14 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem 
mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#9 &rq->__lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &mm->page_table_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pool_lock#2 irq_context: 0 tasklist_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &ep->mtx &____s->seqcount#2 irq_context: 0 &type->s_umount_key#47 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 
uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kernfs_locks->open_file_mutex[count] stock_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] key irq_context: 0 &kernfs_locks->open_file_mutex[count] pcpu_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] percpu_counters_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] pcpu_lock stock_lock irq_context: 0 &hdev->req_lock &hdev->lock &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &data->open_mutex &sem->wait_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 
&of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &____s->seqcount irq_context: 0 &hdev->lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss percpu_counters_lock irq_context: 0 &p->lock &of->mutex kn->active#4 rcu_read_lock rcu_node_0 irq_context: 0 &p->lock &of->mutex kn->active#4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 
&lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_node_0 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &meta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dpm_list_mtx &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx.wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &n->list_lock &c->lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 namespace_sem pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 namespace_sem pcpu_alloc_mutex &rq->__lock irq_context: 0 namespace_sem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &mux->lock irq_context: 0 &ep->mtx remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 &ep->mtx remove_cache_srcu pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex (console_sem).lock irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock stock_lock irq_context: 0 &type->s_umount_key#45 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#16 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex rcu_read_lock &rq->__lock irq_context: 0 
(wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex.wait_lock irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &n->list_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#49 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &list->lock#12 irq_context: 0 &group->mark_mutex &n->list_lock irq_context: 0 &group->mark_mutex &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start 
&obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 kn->active#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex quarantine_lock irq_context: 0 sb_writers#5 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#5 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &____s->seqcount#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 irq_context: 0 sb_writers#3 &mm->mmap_lock &sb->s_type->i_lock_key irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) 
&idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &sem->wait_lock irq_context: 0 &disk->open_mutex &p->pi_lock irq_context: 0 &disk->open_mutex &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &n->list_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 irq_context: 0 cgroup_file_kn_lock irq_context: 0 cgroup_rstat_lock irq_context: 0 cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#10 (work_completion)(&kcm->tx_work) irq_context: 0 &sb->s_type->i_mutex_key#10 &mux->rx_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &mux->rx_lock rlock-AF_KCM irq_context: 0 &sb->s_type->i_mutex_key#10 &knet->mutex irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1118 irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock sk_lock-AF_TIPC irq_context: 0 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rq->__lock irq_context: 0 &mapping->i_mmap_rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mapping->i_mmap_rwsem ptlock_ptr(ptdesc)#2 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pcpu_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy341 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1104 irq_context: 0 sb_writers#3 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex (inetaddr_chain).rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx 
&sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu fs_reclaim &rq->__lock irq_context: 0 &fsnotify_mark_srcu fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work ima_keys_lock irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work &obj_hash[i].lock irq_context: 0 
(wq_completion)events (ima_keys_delayed_work).work pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &f->f_pos_lock key irq_context: 0 &f->f_pos_lock pcpu_lock irq_context: 0 rtnl_mutex &br->hash_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &br->hash_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock percpu_counters_lock irq_context: 0 &f->f_pos_lock pool_lock#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&f->f_pos_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 kfence_freelist_lock irq_context: 0 cb_lock fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &meta->lock irq_context: 0 sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 jbd2_handle irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &c->lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle bit_wait_table + i irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock irq_context: 0 &group->mark_mutex fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock &info->lock irq_context: 0 sb_writers#3 &mm->mmap_lock lock#4 irq_context: 0 sb_writers#3 &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key#15 irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx 
rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uts_sem &rq->__lock irq_context: 0 uts_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pcpu_lock stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 file_rwsem &cfs_rq->removed.lock irq_context: 0 file_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[1] irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &n->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 &dev->mutex pinctrl_list_mutex &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &obj_hash[i].lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 remove_cache_srcu &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock crngs.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &meta->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &lock->wait_lock irq_context: 0 sb_writers#7 kn->active#4 &p->pi_lock irq_context: 0 sb_writers#7 kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &base->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &____s->seqcount irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 
(wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &sem->wait_lock irq_context: 0 cb_lock genl_mutex triggers_list_lock &rq->__lock irq_context: 0 cb_lock genl_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &q->instances_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &log->instances_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &nft_net->commit_mutex irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_VSOCK fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sk_lock-AF_VSOCK fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock rlock-AF_INET irq_context: 0 &sig->cred_guard_mutex &fs->lock &dentry->d_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 rtnl_mutex dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 rtnl_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock kfence_freelist_lock irq_context: 0 &type->s_umount_key#46/1 fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &c->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events drain_vmap_work 
vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_VSOCK batched_entropy_u8.lock irq_context: 0 &type->s_umount_key#46/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 (wq_completion)events reg_work rtnl_mutex.wait_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work &p->pi_lock irq_context: 0 (wq_completion)events reg_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events reg_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &____s->seqcount irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#24 &lru->node[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#20 &n->list_lock irq_context: 0 kn->active#20 &n->list_lock &c->lock irq_context: 0 kn->active#22 &c->lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex.wait_lock irq_context: 0 vlan_ioctl_mutex 
&p->pi_lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &rq->__lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#7 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#7 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#7 tomoyo_ss &meta->lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &xa->xa_lock#5 irq_context: 0 &mm->mmap_lock &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &sb->s_type->i_lock_key irq_context: 0 &mm->mmap_lock &s->s_inode_list_lock irq_context: 0 &mm->mmap_lock batched_entropy_u32.lock irq_context: 0 &mm->mmap_lock &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 kn->active#22 &n->list_lock irq_context: 0 kn->active#22 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sk_lock-AF_VSOCK kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events fqdir_free_work &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle irq_context: 0 rtnl_mutex &tn->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&port_dev->status_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 &ep->mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &meta->lock irq_context: 0 rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &dir->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM stock_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &pcp->lock &zone->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 rtnl_mutex netpoll_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex netpoll_srcu key irq_context: 0 rtnl_mutex netpoll_srcu pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex netpoll_srcu percpu_counters_lock irq_context: 0 rtnl_mutex netpoll_srcu pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim pool_lock#2 irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#23 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex stock_lock irq_context: 0 rtnl_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 loop_validate_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: softirq rcu_read_lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 ebt_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex rcu_read_lock &rq->__lock irq_context: 0 ebt_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_state_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_node_0 irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &____s->seqcount#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 file_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock quarantine_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx 
&sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex deferred_probe_mutex &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&timer) &txwq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX pool_lock irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#46/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#6 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#6 &rq->__lock irq_context: 0 kn->active#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock quarantine_lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &n->list_lock irq_context: 0 &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock vmap_purge_lock.wait_lock irq_context: 0 &dev->mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 oom_adj_mutex oom_adj_mutex.wait_lock irq_context: 0 sb_writers#4 oom_adj_mutex.wait_lock irq_context: 0 sb_writers#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 lweventlist_lock irq_context: 0 &data->open_mutex rfkill_global_mutex quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock stock_lock irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)writeback &rq->__lock irq_context: 0 (wq_completion)writeback &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex dpm_list_mtx irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &____s->seqcount#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock sched_map-wait-type-override &pool->lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#3 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &base->lock irq_context: 0 kn->active#7 &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock pool_lock#2 irq_context: 0 &xt[i].mutex fs_reclaim pcpu_lock stock_lock irq_context: 0 rcu_state.barrier_mutex rcu_node_0 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock 
cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock &n->list_lock irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &sem->wait_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sem->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &p->pi_lock irq_context: 0 &child->perf_event_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock sk_lock-AF_TIPC slock-AF_TIPC irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ctx->uring_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 rcu_node_0 irq_context: 0 sk_lock-AF_INET &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock slock-AF_TIPC irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
&ei->xattr_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 misc_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu quarantine_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i]/1 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1118 &rq->__lock irq_context: 0 &type->s_umount_key#21/1 &n->list_lock irq_context: 0 &type->s_umount_key#21/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_wq[2] irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 uevent_sock_mutex &c->lock irq_context: 0 uevent_sock_mutex &n->list_lock irq_context: 0 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#6 stock_lock irq_context: 0 sb_writers#6 key irq_context: 0 sb_writers#6 pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock pool_lock#2 irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sb_writers#6 percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx 
&root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &xt[i].mutex fs_reclaim &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim key irq_context: 0 &xt[i].mutex fs_reclaim pcpu_lock irq_context: 0 &xt[i].mutex fs_reclaim percpu_counters_lock irq_context: 0 &xt[i].mutex fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_PACKET &rnp->exp_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[3] irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_node_0 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 sk_lock-AF_PACKET init_mm.page_table_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock &rq->__lock irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock fs_reclaim rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_node_0 irq_context: 0 sb_writers#6 pcpu_lock stock_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock cpu_asid_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 pfkey_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 pfkey_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock 
&____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 &group->mark_mutex batched_entropy_u8.lock irq_context: 0 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 &group->mark_mutex kfence_freelist_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &lock->wait_lock irq_context: 0 kn->active#4 &lock->wait_lock irq_context: 0 kn->active#4 &p->pi_lock irq_context: 0 kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock dpm_list_mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->lock key irq_context: 0 &p->lock pcpu_lock irq_context: 0 &p->lock percpu_counters_lock irq_context: softirq (&sk->sk_timer) irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &base->lock irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &rq->__lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
sk_lock-AF_INET rcu_node_0 irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &meta->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work kfence_freelist_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[1] irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex &rq->__lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_node_0 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock &c->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#52 &n->list_lock irq_context: 0 
kn->active#52 &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx system_transition_mutex system_transition_mutex.wait_lock irq_context: 0 misc_mtx system_transition_mutex &rq->__lock irq_context: 0 misc_mtx system_transition_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 system_transition_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_node_0 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_node_0 irq_context: 0 misc_mtx pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 sb_writers#3 &base->lock irq_context: 0 sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 &group->mark_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &____s->seqcount irq_context: 0 &group->mark_mutex &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_wq[2] irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (gc_work).work rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tn->lock &rq->__lock irq_context: 0 rtnl_mutex 
&tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex smc_ib_devices.mutex &rq->__lock irq_context: 0 rtnl_mutex smc_ib_devices.mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override 
pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#46/1 list_lrus_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &xa->xa_lock#4 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock 
genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mount_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex bpf_devs_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &idev->mc_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ep->mtx &rcu_state.expedited_wq irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#46/1 list_lrus_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock irq_context: softirq (&peer->timer_persistent_keepalive) &____s->seqcount 
irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pcpu_lock stock_lock irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock pcpu_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &group->mark_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock cpu_asid_lock irq_context: 0 &group->mark_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &group->mark_mutex remove_cache_srcu rcu_read_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET batched_entropy_u8.lock crngs.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) pool_lock#2 irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex quarantine_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 lweventlist_lock &dir->lock#2 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &rq->__lock cpu_asid_lock irq_context: 0 &ids->rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu irq_context: 0 nfc_devlist_mutex remove_cache_srcu quarantine_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &c->lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 misc_mtx rcu_node_0 irq_context: 0 &ids->rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex wq_pool_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock cpu_asid_lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock &base->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 misc_mtx 
&dev->mutex rfkill_global_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 pcpu_lock irq_context: 0 sb_writers#7 pool_lock irq_context: 0 sb_writers#7 percpu_counters_lock irq_context: 0 sb_writers#7 pcpu_lock stock_lock irq_context: softirq (&lapb->t1timer) &lapb->lock &list->lock#20 irq_context: softirq &list->lock#20 irq_context: softirq rcu_read_lock x25_neigh_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: 0 nf_sockopt_mutex &cfs_rq->removed.lock irq_context: 0 nf_sockopt_mutex &obj_hash[i].lock irq_context: 0 nf_sockopt_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex key irq_context: 0 rtnl_mutex rcu_state.exp_mutex pcpu_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex percpu_counters_lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal key irq_context: 0 sb_writers#3 sb_internal pcpu_lock irq_context: 0 sb_writers#3 sb_internal percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 sb_internal pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock pool_lock#2 
irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem stock_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_sockopt_mutex rcu_node_0 irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem key irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pcpu_lock irq_context: 0 misc_mtx remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem percpu_counters_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock rcu_node_0 irq_context: 0 nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex fwnode_link_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex fwnode_link_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem &cfs_rq->removed.lock irq_context: 0 
(wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 sb_writers#3 sb_internal quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events kernfs_notify_work &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_node_0 irq_context: 0 proto_tab_lock &n->list_lock irq_context: 0 proto_tab_lock &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&pool->idle_timer) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &lock->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 sb_writers#3 remove_cache_srcu &meta->lock irq_context: 0 sb_writers#3 remove_cache_srcu kfence_freelist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &meta->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#666 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rename_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &lock->wait_lock irq_context: softirq (&pool->idle_timer) &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rename_lock &dentry->d_lock irq_context: 0 &wq->mutex &rq->__lock irq_context: 0 &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex quarantine_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&pool->idle_timer) &pool->lock &obj_hash[i].lock irq_context: softirq (&pool->idle_timer) &pool->lock &base->lock irq_context: softirq (&pool->idle_timer) &pool->lock &base->lock &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) quarantine_lock irq_context: 0 &pipe->mutex/1 fs_reclaim &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock &new->lock#2 stock_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock kfence_freelist_lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 
rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ul->lock#2 &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &p->pi_lock irq_context: 0 &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 &xa->xa_lock#22 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 &xa->xa_lock#22 stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim pool_lock#2 irq_context: 0 kn->active#6 &____s->seqcount#2 irq_context: 0 kn->active#6 &pcp->lock &zone->lock irq_context: 0 kn->active#6 &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock tasklist_lock rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock tasklist_lock &sighand->siglock irq_context: 0 rcu_read_lock tasklist_lock &sighand->siglock &p->pi_lock irq_context: 0 rcu_read_lock tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &xa->xa_lock#22 pool_lock#2 irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) 
&hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#666 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock map_idr_lock &n->list_lock irq_context: 0 lock map_idr_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &nbd->config_lock &q->mq_freeze_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#22 &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount#2 irq_context: 0 rtnl_mutex pcpu_alloc_mutex fs_reclaim irq_context: 0 rtnl_mutex pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex pcpu_alloc_mutex &____s->seqcount 
irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &xa->xa_lock#22 &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &iopt->domains_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &iopt->domains_rwsem &iopt->iova_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem pool_lock irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &group->inotify_data.idr_lock 
fill_pool_map-wait-type-override &c->lock irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock kfence_freelist_lock irq_context: 0 sb_writers#6 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 &nbd->config_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &nbd->config_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#289 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &lock->wait_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu rcu_read_lock stock_lock irq_context: softirq (&peer->timer_send_keepalive) tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 sb_writers#4 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq 
&(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock batched_entropy_u8.lock irq_context: 0 cb_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &cfs_rq->removed.lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &ids->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &base->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 bpf_devs_lock &rq->__lock irq_context: 0 bpf_devs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pcpu_lock stock_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &base->lock irq_context: 0 &xt[i].mutex fs_reclaim &cfs_rq->removed.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)events deferred_process_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &base->lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#23 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle 
rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &meta->lock irq_context: 0 &alarm_bases[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &p->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &wq->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &type->s_umount_key#21/1 &rq->__lock irq_context: 0 sb_writers#4 batched_entropy_u8.lock irq_context: 0 sb_writers#4 kfence_freelist_lock irq_context: 0 sb_writers#4 &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex gdp_mutex.wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 rtnl_mutex gdp_mutex remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex gdp_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock cpu_asid_lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock 
&p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#23 &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#3 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#3 &rq->__lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 misc_mtx rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 btf_idr_lock irq_context: 0 &f->f_pos_lock &p->lock stock_lock irq_context: 0 &f->f_pos_lock &p->lock &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &p->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &p->lock sync_timeline_list_lock irq_context: 0 &f->f_pos_lock &p->lock sync_file_list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock stock_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex 
&____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &meta->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 sb_writers#7 iattr_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex.wait_lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex purge_vmap_area_lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &p->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock x25_list_lock irq_context: softirq rcu_read_lock x25_forward_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#21 &n->list_lock irq_context: 0 kn->active#21 &n->list_lock &c->lock irq_context: 0 kn->active#21 &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &retval->lock irq_context: 0 &p->lock &of->mutex kn->active#4 batched_entropy_u8.lock irq_context: 0 &p->lock &of->mutex kn->active#4 kfence_freelist_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &meta->lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 
irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx &rcu_state.expedited_wq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &new->lock#2 &obj_hash[i].lock irq_context: 0 rcu_read_lock &new->lock#2 pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex quarantine_lock irq_context: 0 misc_mtx remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock batched_entropy_u8.lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 remove_cache_srcu rcu_read_lock key irq_context: 0 remove_cache_srcu rcu_read_lock pcpu_lock irq_context: 0 remove_cache_srcu rcu_read_lock percpu_counters_lock irq_context: 0 remove_cache_srcu rcu_read_lock pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss 
remove_cache_srcu pool_lock#2 irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 put_task_map-wait-type-override#2 &obj_hash[i].lock irq_context: 0 put_task_map-wait-type-override#2 pool_lock#2 irq_context: 0 misc_mtx pcpu_alloc_mutex rcu_node_0 irq_context: 0 misc_mtx pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx batched_entropy_u8.lock crngs.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock stock_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock key irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock pcpu_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock percpu_counters_lock 
irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock pcpu_lock stock_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &meta->lock irq_context: 0 sb_writers#4 &cfs_rq->removed.lock irq_context: 0 &ctx->wqh irq_context: 0 &dev->mutex kn->active#4 irq_context: 0 &dev->mutex kn->active#4 &root->deactivate_waitq irq_context: 0 &dev->mutex kn->active#4 &rq->__lock irq_context: 0 &dev->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &rq->__lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mount_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem 
&root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 &tsk->futex_exit_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback rcu_node_0 irq_context: 0 nfc_devlist_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ctx->wqh tk_core.seq.seqcount irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &p->lock remove_cache_srcu rcu_node_0 irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 misc_mtx sched_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex dpm_list_mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx wq_pool_mutex rcu_node_0 irq_context: 0 misc_mtx wq_pool_mutex &rq->__lock irq_context: 0 misc_mtx wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 vlan_ioctl_mutex &rq->__lock irq_context: 0 vlan_ioctl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 nfc_devlist_mutex nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[2] irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &sb->s_type->i_lock_key#4 bit_wait_table + i irq_context: 0 misc_mtx rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock 
&cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ctx->wqh &obj_hash[i].lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1124 irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 nfc_devlist_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &new->lock#2 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &pfk->dump_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &pfk->dump_lock &net->xfrm.xfrm_policy_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex _xmit_ETHER console_owner irq_context: 0 misc_mtx nfc_devlist_mutex batched_entropy_u8.lock irq_context: 0 misc_mtx nfc_devlist_mutex kfence_freelist_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex key irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex pcpu_lock irq_context: 0 &dev->mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &rcu_state.expedited_wq irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 vlan_ioctl_mutex &mm->mmap_lock &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work 
rtnl_mutex sched_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &meta->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[0] irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex key irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &disk->open_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock stock_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx quarantine_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &rnp->exp_wq[2] irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex 
remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override &__ctx->lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex percpu_counters_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &xt[i].mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &xt[i].mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock key irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pcpu_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock percpu_counters_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pcpu_lock stock_lock irq_context: 0 sb_writers#10 &pcp->lock &zone->lock irq_context: 0 sb_writers#10 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) 
per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 vlan_ioctl_mutex &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rnp->exp_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock 
irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu 
&n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#5 stock_lock irq_context: 0 &type->i_mutex_dir_key#5 key irq_context: 0 &type->i_mutex_dir_key#5 pcpu_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &rnp->exp_wq[1] irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rcu_state.expedited_wq irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &br->hash_lock &n->list_lock irq_context: 0 rtnl_mutex &br->hash_lock &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rlock-AF_KEY irq_context: 0 (wq_completion)events free_ipc_work &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex stock_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim 
mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 vlan_ioctl_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#165 irq_context: 0 sk_lock-AF_INET &meta->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &rnp->exp_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 &disk->open_mutex &lock->wait_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &rnp->exp_wq[3] irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex key irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 stock_lock irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 key irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex device_links_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_node_0 
irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 pcpu_lock irq_context: 0 &type->i_mutex_dir_key#5 percpu_counters_lock irq_context: 0 &type->i_mutex_dir_key#5 pcpu_lock stock_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 percpu_counters_lock irq_context: 0 kn->active#50 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#4 &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 &dev->mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &rnp->exp_wq[0] irq_context: 0 
(wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 pcpu_lock stock_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock &zone->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1133 irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rnp->exp_wq[3] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &rnp->exp_wq[2] irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &obj_hash[i].lock pool_lock irq_context: 0 
(wq_completion)events free_ipc_work rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events free_ipc_work &p->pi_lock irq_context: 0 (wq_completion)events free_ipc_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &cfs_rq->removed.lock irq_context: 0 kn->active#4 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock quarantine_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex purge_vmap_area_lock irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &____s->seqcount#7 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events 
(work_completion)(&aux->work) rcu_read_lock &rcu_state.gp_wq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pidmap_lock &obj_hash[i].lock pool_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &n->list_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &n->list_lock &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#165 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &pcp->lock &zone->lock &____s->seqcount irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1133 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#4 &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &base->lock irq_context: softirq rcu_read_lock rcu_read_lock &base->lock &obj_hash[i].lock 
irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 kn->active#50 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem stock_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem percpu_counters_lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &c->lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kfence_freelist_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock rcu_node_0 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 vlan_ioctl_mutex &sem->wait_lock irq_context: 0 tomoyo_ss rcu_read_lock stock_lock irq_context: 0 tomoyo_ss rcu_read_lock pcpu_lock stock_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex remove_cache_srcu &meta->lock irq_context: 0 rtnl_mutex remove_cache_srcu kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 &rq->__lock irq_context: 0 sk_lock-AF_INET stock_lock irq_context: 0 sk_lock-AF_INET key irq_context: 0 sk_lock-AF_INET pcpu_lock irq_context: 0 sk_lock-AF_INET percpu_counters_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 &group->inotify_data.idr_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock kfence_freelist_lock irq_context: 0 sk_lock-AF_INET pcpu_lock stock_lock irq_context: softirq (&peer->timer_persistent_keepalive) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#392 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 
rtnl_mutex &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tbl->lock krc.lock &base->lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sched_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock irq_context: 0 &data->open_mutex &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &rq->__lock irq_context: 0 &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex percpu_counters_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 
pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC quarantine_lock irq_context: 0 &dev->mutex dpm_list_mtx.wait_lock irq_context: 0 nfc_devlist_mutex batched_entropy_u8.lock irq_context: 0 nfc_devlist_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 nfc_devlist_mutex kfence_freelist_lock irq_context: 0 nfc_devlist_mutex &meta->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#168 irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) &rq->__lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 pool_lock#2 irq_context: 0 misc_mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_VSOCK &rq->__lock irq_context: 0 sk_lock-AF_VSOCK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex.wait_lock irq_context: 0 &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq 
&p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ids->rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rename_lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex &rnp->exp_wq[3] irq_context: 0 &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 wq_pool_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 wq_pool_mutex.wait_lock irq_context: 0 misc_mtx wq_pool_mutex.wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_owner_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_owner irq_context: 0 (wq_completion)nfc6_nci_tx_wq#23 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const 
void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_rx_wq#18 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (work_completion)(&local->timeout_work) &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem stock_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem key irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex 
(pm_chain_head).rwsem rtnl_mutex rcu_node_0 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: hardirq &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &base->lock irq_context: 0 sb_internal jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock stock_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mount_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: softirq 
(&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_internal remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &rq->__lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 lweventlist_lock &dir->lock#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 rtnl_mutex stack_depot_init_mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock quarantine_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) 
&ht->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem batched_entropy_u8.lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->routes.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock pool_lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem &meta->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock sb_writers#12 irq_context: 0 (wq_completion)events (debug_obj_work).work quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#681 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1138 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)events 
(work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#20 &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &br->lock &c->lock irq_context: 0 rtnl_mutex &br->lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock &dir->lock#2 irq_context: 0 sb_writers#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq irq_context: 
0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_wq[0] irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#168 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#38 irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#5 &base->lock irq_context: 0 sb_writers#5 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work 
rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx device_links_lock &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &____s->seqcount irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx device_links_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock &c->lock irq_context: 0 sb_writers#9 fs_reclaim &rq->__lock 
irq_context: 0 sb_writers#9 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem &n->list_lock irq_context: 0 namespace_sem &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal &rq->__lock cpu_asid_lock irq_context: 0 &type->s_umount_key#47 rcu_node_0 irq_context: 0 &type->s_umount_key#47 &rcu_state.expedited_wq irq_context: 0 &type->s_umount_key#47 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->s_umount_key#47 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#47 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 &rq->__lock irq_context: 0 &type->s_umount_key#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 rcu_node_0 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rcu_state.expedited_wq irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_state.exp_wake_mutex.wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex.wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rnp->exp_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rnp->exp_wq[2] irq_context: 0 &hdev->req_lock &hdev->lock &rnp->exp_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss &base->lock 
irq_context: 0 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wq->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx dpm_list_mtx.wait_lock irq_context: 0 rtnl_mutex &wq->mutex &rq->__lock irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 rcu_read_lock 
&dentry->d_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex fs_reclaim &rq->__lock irq_context: 0 nf_hook_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &obj_hash[i].lock pool_lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 
rcu_read_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sb_writers#13 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 sb_writers#13 pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock 
irq_context: 0 sb_writers#13 mount_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#49 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem bit_wait_table + i irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock cpu_asid_lock irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &n->list_lock irq_context: 0 sb_writers pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM (console_sem).lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner_lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner irq_context: 0 purge_vmap_area_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: softirq (&peer->timer_send_keepalive) &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) ovs_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#170 irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &ctx->wqh &alarm_bases[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem 
&n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &rq->__lock irq_context: 0 purge_vmap_area_lock quarantine_lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner &port_lock_key irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner console_owner_lock irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM pool_lock#2 irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &dir->lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &obj_hash[i].lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM stock_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ctx->wqh &alarm_bases[i].lock hrtimer_bases.lock irq_context: 0 &ctx->wqh &alarm_bases[i].lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start key irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem quarantine_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#9 remove_cache_srcu irq_context: 0 
&hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->lock fs_reclaim &rq->__lock irq_context: 0 &hdev->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#9 remove_cache_srcu &c->lock irq_context: 0 sb_writers#9 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu pcpu_lock stock_lock irq_context: 0 sk_lock-AF_INET sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock crngs.lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#170 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1102 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM rcu_read_lock &sec->lock irq_context: 0 &alarm_bases[i].lock hrtimer_bases.lock irq_context: 0 &alarm_bases[i].lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) 
fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &alarm_bases[i].lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink nf_conntrack_mutex &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock stock_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock key irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &____s->seqcount irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &xt[i].mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &ctx->wqh hrtimer_bases.lock irq_context: 0 &ctx->wqh hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->wqh hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &cfs_rq->removed.lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &base->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &meta->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock rcu_node_0 irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1102 &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 &base->lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &base->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) 
&hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 loop_validate_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: softirq (&peer->timer_send_keepalive) &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 fanout_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 fanout_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#10 &n->list_lock irq_context: 0 kn->active#10 &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 &net->packet.sklist_lock &rq->__lock irq_context: 0 &net->packet.sklist_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock &dir->lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock deferred_lock irq_context: softirq (&mp->timer) &br->multicast_lock nl_table_lock irq_context: softirq (&mp->timer) &br->multicast_lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock nl_table_wait.lock irq_context: softirq (&p->timer) irq_context: softirq (&p->timer) &br->multicast_lock irq_context: softirq (&p->timer) &br->multicast_lock pool_lock#2 irq_context: softirq (&p->timer) &br->multicast_lock &dir->lock#2 irq_context: softirq (&p->timer) &br->multicast_lock deferred_lock irq_context: softirq (&p->timer) &br->multicast_lock nl_table_lock irq_context: softirq (&p->timer) &br->multicast_lock &obj_hash[i].lock irq_context: softirq (&p->timer) &br->multicast_lock nl_table_wait.lock irq_context: softirq (&p->timer) &br->multicast_lock &base->lock irq_context: softirq (&p->timer) &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pcpu_lock stock_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &meta->lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &dev->mutex udc_lock stock_lock irq_context: 0 &dev->mutex udc_lock &obj_hash[i].lock irq_context: 0 &dev->mutex udc_lock key irq_context: 0 &dev->mutex udc_lock pcpu_lock irq_context: 0 &dev->mutex udc_lock percpu_counters_lock irq_context: 0 &dev->mutex udc_lock pcpu_lock stock_lock irq_context: 0 &dev->mutex udc_lock pool_lock#2 irq_context: 0 &dev->mutex udc_lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock 
&c->lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock &n->list_lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#12 &dev->mutex &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#12 &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#12 &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&p->timer) &br->multicast_lock &c->lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rq->__lock cpu_asid_lock irq_context: 0 nfnl_subsys_ctnetlink &rq->__lock cpu_asid_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &____s->seqcount#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 tomoyo_ss remove_cache_srcu &____s->seqcount irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock batched_entropy_u8.lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &xa->xa_lock#9 &n->list_lock irq_context: 0 &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 &bgl->locks[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#11 &____s->seqcount#2 irq_context: 0 kn->active#11 &pcp->lock &zone->lock irq_context: 0 kn->active#11 &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &c->lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &n->list_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &n->list_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#172 irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#172 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#173 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#383 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rcu_state.expedited_wq irq_context: 0 &p->lock &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 &map->freeze_mutex irq_context: 0 sb_internal jbd2_handle &journal->j_state_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#383 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#383 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu quarantine_lock irq_context: 0 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock 
&rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock hwsim_radio_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_internal remove_cache_srcu irq_context: 0 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 sb_internal remove_cache_srcu &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_internal &rq->__lock irq_context: 0 sb_internal &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal remove_cache_srcu &rq->__lock irq_context: 0 sb_internal remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock cpu_asid_lock irq_context: softirq (&peer->timer_persistent_keepalive) init_task.mems_allowed_seq.seqcount irq_context: softirq (&peer->timer_persistent_keepalive) &pcp->lock &zone->lock irq_context: softirq (&peer->timer_persistent_keepalive) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#381 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &base->lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pcpu_lock stock_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#381 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_PACKET &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 sb_internal batched_entropy_u8.lock irq_context: 0 sb_internal kfence_freelist_lock irq_context: 0 sb_internal &meta->lock irq_context: 0 system_transition_mutex device_hotplug_lock stock_lock irq_context: 0 system_transition_mutex device_hotplug_lock &obj_hash[i].lock irq_context: 0 system_transition_mutex device_hotplug_lock key irq_context: 0 system_transition_mutex device_hotplug_lock pcpu_lock irq_context: 0 system_transition_mutex device_hotplug_lock percpu_counters_lock irq_context: 0 system_transition_mutex device_hotplug_lock pcpu_lock stock_lock irq_context: 0 system_transition_mutex device_hotplug_lock pool_lock#2 irq_context: 0 system_transition_mutex device_hotplug_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &n->list_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock rcu_read_lock &____s->seqcount#4 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &____s->seqcount irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &n->list_lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &____s->seqcount irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 driver_id_numbers.xa_lock irq_context: 0 &dev->lock irq_context: 0 uevent_sock_mutex fs_reclaim irq_context: 0 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex.wait_lock irq_context: 0 udc_lock irq_context: 0 udc_lock (console_sem).lock irq_context: 0 udc_lock console_lock console_srcu console_owner_lock irq_context: 0 udc_lock console_lock console_srcu console_owner irq_context: 0 udc_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 udc_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 udc_lock &rq->__lock irq_context: 0 udc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 udc_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->lock (console_sem).lock irq_context: 0 &dev->lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->lock console_lock console_srcu console_owner irq_context: 0 &dev->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex &dev->lock irq_context: 0 &dev->mutex &queue->lock irq_context: 0 &dev->mutex &queue->lock pool_lock#2 irq_context: 0 &dev->mutex &queue->lock semaphore->lock#2 irq_context: 0 &dev->mutex &udc->connect_lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock &dev->power.lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &dev->mutex &udc->connect_lock &dum_hcd->dum->lock hcd_root_hub_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &udc->connect_lock &rq->__lock irq_context: 0 &dev->mutex &udc->connect_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex quarantine_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &dev->power.lock irq_context: 0 
(wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock device_state_lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex usbfs_mutex irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq lock#6 &kcov->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &____s->seqcount irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex fs_reclaim irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)pm 
(work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &x->wait#19 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &base->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex (&timer.timer) irq_context: 0 &dev->mutex &udc->connect_lock hcd_root_hub_lock irq_context: 0 &dev->mutex udc_lock udc_lock.wait_lock irq_context: 0 &dev->mutex udc_lock &rq->__lock irq_context: 0 &dev->mutex udc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 udc_lock.wait_lock irq_context: 0 &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 semaphore->lock#2 irq_context: 0 &queue->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &base->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#40 &rq->__lock irq_context: 0 &dev->mutex subsys mutex#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&port_dev->status_lock &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &rq->__lock irq_context: softirq (&hcd->rh_timer) irq_context: softirq (&hcd->rh_timer) &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#295 irq_context: 0 sb_internal jbd2_handle &bgl->locks[i].lock key#27 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &x->wait#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex devtree_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dev->power.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex device_state_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex device_state_lock kernfs_notify_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock 
irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &bus->devnum_next_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 
&mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 cb_lock genl_mutex wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex 
&dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &hub->irq_urb_lock irq_context: softirq &hub->irq_urb_lock hcd_root_hub_lock irq_context: softirq &hub->irq_urb_lock hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem device_state_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex 
ehci_cf_port_reset_rwsem device_state_lock kernfs_notify_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex (console_sem).lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &rq->__lock irq_context: softirq (&dum_hcd->timer) irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock irq_context: softirq (&dum_hcd->timer) &dev->lock irq_context: softirq (&dum_hcd->timer) &queue->lock irq_context: softirq (&dum_hcd->timer) &queue->lock pool_lock#2 irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 &p->pi_lock irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock &obj_hash[i].lock irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock &base->lock irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 &dum_hcd->dum->lock irq_context: 0 &x->wait#27 irq_context: softirq (&dum_hcd->timer) &x->wait#27 irq_context: softirq (&dum_hcd->timer) &x->wait#27 &p->pi_lock irq_context: softirq (&dum_hcd->timer) &x->wait#27 &p->pi_lock &rq->__lock irq_context: softirq (&dum_hcd->timer) &x->wait#27 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock pool_lock#2 irq_context: softirq (&dum_hcd->timer) &dum_hcd->dum->lock hcd_urb_list_lock irq_context: softirq (&dum_hcd->timer) lock#6 irq_context: softirq (&dum_hcd->timer) lock#6 kcov_remote_lock irq_context: softirq (&dum_hcd->timer) &x->wait#19 irq_context: softirq (&dum_hcd->timer) &x->wait#19 &p->pi_lock irq_context: softirq (&dum_hcd->timer) &x->wait#19 &p->pi_lock &rq->__lock irq_context: softirq (&dum_hcd->timer) &x->wait#19 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&dum_hcd->timer) lock#6 &kcov->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock 
hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex quirk_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex usb_port_peer_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex usb_port_peer_mutex device_state_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_links_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex (console_sem).lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &c->lock irq_context: softirq (&dum_hcd->timer) &queue->lock &c->lock irq_context: softirq (&dum_hcd->timer) &queue->lock batched_entropy_u8.lock irq_context: softirq (&dum_hcd->timer) &queue->lock kfence_freelist_lock irq_context: softirq (&dum_hcd->timer) &x->wait#27 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex input_pool.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &k->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex bus_type_sem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &k->k_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dpm_list_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex req_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &x->wait#11 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &k->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &k->k_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&dev->mutex device_links_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex device_links_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex set_config_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex devtree_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &x->wait#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
hcd->bandwidth_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->lock rcu_read_lock &pool->lock irq_context: 0 &dev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->lock rcu_read_lock &pool->lock 
&p->pi_lock &rq->__lock irq_context: 0 &dev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->lock &obj_hash[i].lock irq_context: 0 &dev->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex device_state_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex device_state_lock kernfs_notify_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&dum_hcd->timer) &x->wait#27 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex bus_type_sem irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dpm_list_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 &dev->mutex (work_completion)(&udc->vbus_work) irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock pool_lock#2 irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock semaphore->lock#2 irq_context: 0 
&dev->mutex &udc->connect_lock hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex &udc->connect_lock hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex &udc->connect_lock hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex &udc->connect_lock hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &udc->connect_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &udc->connect_lock udc_lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock pool_lock#2 irq_context: 0 &dev->mutex &udc->connect_lock udc_lock &queue->lock semaphore->lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &k->list_lock irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &k->k_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &new_driver->dynids.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex device_links_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex device_links_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
&dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex subsys mutex#58 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &x->wait#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex (usb_notifier_list).rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex subsys mutex#58 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &x->wait#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) 
lock#6 &kcov->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &x->wait#19 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_state_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_state_lock kernfs_notify_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_state_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock lock#4 &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
&root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd_urb_unlink_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex (usb_notifier_list).rwsem usbfs_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
deferred_probe_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex device_links_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER 
irq_context: 0 sk_lock-AF_INET6 clock-AF_INET6 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rq->__lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &base->lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock &rq->__lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &base->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &rq->__lock cpu_asid_lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_node_0 irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex kfence_freelist_lock irq_context: softirq (&dum_hcd->timer) &x->wait#19 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#48 remove_cache_srcu irq_context: 0 kn->active#48 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#48 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &____s->seqcount#2 irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &meta->lock irq_context: 0 sb_writers#5 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &fsnotify_mark_srcu irq_context: 0 sb_writers &s->s_inode_list_lock irq_context: 0 sb_writers &obj_hash[i].lock irq_context: 0 sb_writers &sbinfo->stat_lock irq_context: 0 sb_writers &xa->xa_lock#9 irq_context: 0 sb_writers &fsnotify_mark_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 kn->active#48 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#48 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#48 &____s->seqcount#2 irq_context: 0 kn->active#48 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &meta->lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 udc_lock udc_lock.wait_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#48 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#48 &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &meta->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 lock kernfs_idr_lock &n->list_lock irq_context: 0 lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) 
&dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock kernfs_idr_lock &c->lock irq_context: softirq (&peer->timer_persistent_keepalive) batched_entropy_u8.lock irq_context: softirq (&peer->timer_persistent_keepalive) kfence_freelist_lock irq_context: 0 sk_lock-AF_INET6 slock-AF_INET6 &obj_hash[i].lock irq_context: 0 uevent_sock_mutex &____s->seqcount#2 irq_context: 0 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#9 &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#9 &p->pi_lock irq_context: 0 &p->lock &of->mutex kn->active#9 &p->pi_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#9 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#10 &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#10 &p->pi_lock irq_context: 0 &p->lock &of->mutex kn->active#12 &lock->wait_lock irq_context: 0 &p->lock &of->mutex kn->active#12 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &____s->seqcount#2 irq_context: softirq init_task.mems_allowed_seq.seqcount irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &dev->lock console_owner_lock irq_context: 0 &dev->lock console_owner irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &obj_hash[i].lock irq_context: softirq &(&fw_cache.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&fw_cache.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex (&timer.timer) irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem 
&hub->status_mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &n->list_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &dev->mutex &udc->connect_lock &queue->lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
&(&priv->bus_notifier)->rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &base->lock &obj_hash[i].lock irq_context: 0 kn->active#11 &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock sched_map-wait-type-override &pool->lock irq_context: 0 kn->active#11 &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 kn->active#11 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 
&p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock 
hcd->address0_mutex &queue->lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock semaphore->lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock semaphore->lock#2 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock semaphore->lock#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock semaphore->lock#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &____s->seqcount irq_context: 0 tasklist_lock rcu_read_lock &sighand->siglock irq_context: 0 tasklist_lock &sighand->siglock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 udc_lock rcu_read_lock &rq->__lock irq_context: 0 udc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock kfence_freelist_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &meta->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 udc_lock &cfs_rq->removed.lock irq_context: 0 udc_lock &obj_hash[i].lock irq_context: 0 udc_lock pool_lock#2 irq_context: 0 sb_writers#7 iattr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&dum_hcd->timer) &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->lock &meta->lock irq_context: 0 &dev->lock kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &n->list_lock irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 key#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &new->lock#2 console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 udc_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex &queue->lock kfence_freelist_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#392 &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 
&sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &meta->lock irq_context: 0 &xt[i].mutex rcu_read_lock stock_lock irq_context: 0 sk_lock-AF_ROSE irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex freezer_mutex.wait_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex.wait_lock irq_context: 0 &ids->rwsem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key#24 irq_context: 0 sk_lock-AF_ROSE slock-AF_ROSE irq_context: 0 slock-AF_ROSE irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override stock_lock irq_context: 0 fill_pool_map-wait-type-override key irq_context: 0 fill_pool_map-wait-type-override pcpu_lock irq_context: 0 fill_pool_map-wait-type-override percpu_counters_lock irq_context: 0 fill_pool_map-wait-type-override pcpu_lock stock_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 
(wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#381 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#380 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &lock->wait_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle key#4 irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET6 pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem bit_wait_table + i irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &cfs_rq->removed.lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 pool_lock#2 irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 syslog_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: softirq (&ndev->rs_timer) init_task.mems_allowed_seq.seqcount irq_context: 0 pfkey_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock quarantine_lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex rcu_read_lock 
&pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &meta->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#7 &n->list_lock irq_context: 0 kn->active#7 &n->list_lock &c->lock irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem 
sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#5 stock_lock irq_context: 0 sb_writers#5 key irq_context: 0 sb_writers#5 pcpu_lock irq_context: 0 sb_writers#5 percpu_counters_lock irq_context: 0 sb_writers#5 pcpu_lock stock_lock irq_context: 0 uevent_sock_mutex quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#12 &dev->mutex &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#12 &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#12 &p->pi_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#12 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#12 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem quarantine_lock irq_context: softirq net/wireless/reg.c:236 irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock 
&obj_hash[i].lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work rtnl_mutex irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work rtnl_mutex &rdev->wiphy.mtx irq_context: 0 &sb->s_type->i_lock_key#16 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#12 remove_cache_srcu irq_context: 0 kn->active#12 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#12 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#12 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#12 remove_cache_srcu &c->lock irq_context: 0 kn->active#12 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &rq->__lock irq_context: 0 sb_writers 
&sb->s_type->i_mutex_key#4 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#10 &rq->__lock irq_context: 0 kn->active#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&timer) rcu_read_lock &____s->seqcount#2 irq_context: softirq (&timer) rcu_read_lock &____s->seqcount irq_context: 0 rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &sb->s_type->i_lock_key#16 &dentry->d_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pfkey_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &pl->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &pl->lock key#12 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &resv_map->lock irq_context: 0 cb_lock genl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 &tsk->futex_exit_mutex stock_lock irq_context: 0 &tsk->futex_exit_mutex key irq_context: 0 &tsk->futex_exit_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &n->list_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &n->list_lock &c->lock irq_context: 0 &tsk->futex_exit_mutex percpu_counters_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock 
pool_lock#2 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &queue->lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock &cfs_rq->removed.lock irq_context: 0 udc_lock console_owner_lock irq_context: 0 udc_lock console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex console_owner_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock batched_entropy_u8.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock kfence_freelist_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#382 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) quarantine_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem bit_wait_table + i irq_context: 0 &sbi->s_writepages_rwsem sched_map-wait-type-override rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sem->wait_lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#8 &____s->seqcount#2 irq_context: 0 kn->active#8 &____s->seqcount 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_internal &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &sem->wait_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_internal jbd2_handle irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 
sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 cb_lock &dir->lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal &journal->j_state_lock irq_context: 0 sb_internal &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_internal &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal &journal->j_state_lock &base->lock irq_context: 0 sb_internal &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle bit_wait_table + i irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu quarantine_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#6 remove_cache_srcu irq_context: 0 kn->active#6 remove_cache_srcu quarantine_lock irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rhashtable_bucket irq_context: 0 kn->active#6 remove_cache_srcu &c->lock irq_context: 0 kn->active#6 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#6 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#6 remove_cache_srcu 
&pcp->lock &zone->lock irq_context: 0 kn->active#6 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 kn->active#12 &rq->__lock irq_context: 0 kn->active#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &dev->mutex &queue->lock &c->lock irq_context: 0 &dev->mutex &queue->lock &n->list_lock irq_context: 0 &dev->mutex &queue->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 init_task.mems_allowed_seq.seqcount irq_context: softirq (&dum_hcd->timer) &queue->lock &n->list_lock irq_context: softirq (&dum_hcd->timer) &queue->lock &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 kn->active#8 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#6 &n->list_lock irq_context: 0 kn->active#6 &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 init_task.mems_allowed_seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &____s->seqcount irq_context: 0 &dev->mutex &udc->connect_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 namespace_sem stock_lock irq_context: 0 pernet_ops_rwsem stock_lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock &c->lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock 
irq_context: 0 pernet_ops_rwsem rtnl_mutex stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock stock_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock stock_lock irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 namespace_sem &cfs_rq->removed.lock irq_context: 0 namespace_sem &obj_hash[i].lock irq_context: 0 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex stock_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock stock_lock irq_context: softirq rcu_callback pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem crngs.lock base_crng.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock crngs.lock irq_context: 0 pernet_ops_rwsem nf_log_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_log_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock &c->lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex.wait_lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock &rq->__lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nl_table_lock nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 namespace_sem pcpu_alloc_mutex.wait_lock irq_context: 0 namespace_sem &p->pi_lock irq_context: 0 namespace_sem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 namespace_sem &p->pi_lock &rq->__lock irq_context: 0 namespace_sem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock kfence_freelist_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->work_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem 
uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem remove_cache_srcu irq_context: 0 pernet_ops_rwsem remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex stack_depot_init_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &____s->seqcount#2 
irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &____s->seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 sb_internal jbd2_handle irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &meta->lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &meta->lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock &n->list_lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 
0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock stock_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_internal &____s->seqcount#2 irq_context: 0 sb_internal &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_hook_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[2] irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex quarantine_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu 
&obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock key irq_context: 0 pernet_ops_rwsem rcu_read_lock pcpu_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock percpu_counters_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_internal &____s->seqcount irq_context: 0 pernet_ops_rwsem batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &p->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &p->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex 
uevent_sock_mutex fs_reclaim pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &meta->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &lock->wait_lock irq_context: 0 sb_internal jbd2_handle &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#9 &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem &hub->status_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 key#24 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex 
sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &rq->__lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_internal &n->list_lock irq_context: 0 sb_internal &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu stock_lock irq_context: 0 &fsnotify_mark_srcu key irq_context: 0 &fsnotify_mark_srcu pcpu_lock irq_context: 0 &fsnotify_mark_srcu percpu_counters_lock irq_context: 0 &fsnotify_mark_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_inum_ida.xa_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_inum_ida.xa_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &tsk->futex_exit_mutex pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#10 irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock udc_lock.wait_lock irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &mm->mmap_lock &rq->__lock irq_context: 0 rtnl_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#9 &____s->seqcount#2 irq_context: 0 kn->active#9 &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &disk->open_mutex &rq->__lock cpu_asid_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&type->i_mutex_dir_key#3 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->work_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->work_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &rq->__lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#9 remove_cache_srcu irq_context: 0 kn->active#9 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#9 remove_cache_srcu &c->lock irq_context: 0 kn->active#9 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock &hub->status_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 &ep->mtx quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 key irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 percpu_counters_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &mm->mmap_lock key irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock pcpu_lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#3 &mm->mmap_lock percpu_counters_lock irq_context: 0 sb_writers#3 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &vma->vm_lock->lock batched_entropy_u8.lock crngs.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock cpu_asid_lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 cb_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work)#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 &f->f_pos_lock sb_writers#12 &rq->__lock irq_context: 0 &f->f_pos_lock sb_writers#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock sb_writers#12 &mm->mmap_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&sbi->s_writepages_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 sb_writers#6 &cfs_rq->removed.lock irq_context: 0 sb_writers#6 &obj_hash[i].lock irq_context: 0 &ids->rwsem &p->pi_lock irq_context: 0 &ids->rwsem &p->pi_lock &rq->__lock irq_context: 0 &ids->rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 mount_lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 &dev->mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#382 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#382 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#382 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#173 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#174 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#382 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 pidmap_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim stock_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) 
sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock krc.lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ns->work) keyring_name_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ns->work) proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex 
nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &table->hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &table->hash[i].lock &table->hash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &xa->xa_lock#9 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &fsnotify_mark_srcu irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-clock-AF_RXRPC irq_context: 0 pernet_ops_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd &rq->__lock irq_context: 0 &root->kernfs_rwsem &base->lock irq_context: 0 &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem (wq_completion)krxrpcd pool_lock#2 irq_context: 0 pernet_ops_rwsem &wq->mutex irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock cpu_asid_lock irq_context: 0 &xt[i].mutex &obj_hash[i].lock pool_lock irq_context: 0 
&xt[i].mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem &wq->mutex &x->wait#10 irq_context: 0 &xt[i].mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 &xa->xa_lock#20 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &xa->xa_lock#20 fill_pool_map-wait-type-override &c->lock irq_context: 0 &xa->xa_lock#20 fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_callback put_task_map-wait-type-override pool_lock irq_context: 0 pidmap_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle key#27 irq_context: 0 &vma->vm_lock->lock fs_reclaim stock_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim key irq_context: 0 &vma->vm_lock->lock fs_reclaim pcpu_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim percpu_counters_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) quarantine_lock irq_context: 0 pernet_ops_rwsem rlock-AF_RXRPC irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock 
pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim pool_lock#2 irq_context: 0 &f->f_pos_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#10 &dentry->d_lock irq_context: 0 sb_writers#10 tomoyo_ss irq_context: 0 sb_writers#10 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#10 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#10 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#10 fs_reclaim irq_context: 0 sb_writers#10 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#10 stock_lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &mapping->i_mmap_rwsem irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &wb->list_lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &wb->list_lock &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#10 tomoyo_ss &c->lock irq_context: 0 sb_writers#10 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#10 tomoyo_ss &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &base->lock irq_context: 0 pernet_ops_rwsem &fsnotify_mark_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 (wq_completion)nfc9_nci_rx_wq#4 irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 &data->open_mutex 
rfkill_global_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#382 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#327 irq_context: 0 pernet_ops_rwsem (&net->fs_probe_timer) irq_context: 0 sb_writers#10 &c->lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#10 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#382 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#381 irq_context: 0 pernet_ops_rwsem &net->cells_lock irq_context: 0 sb_writers#10 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#10 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 pernet_ops_rwsem (&net->cells_timer) irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pcpu_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &ep->mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem (&net->fs_timer) irq_context: 0 pernet_ops_rwsem &ent->pde_unload_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)kblockd 
(work_completion)(&q->timeout_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&q->timeout_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rcu_read_lock &tn->node_list_lock irq_context: 0 rtnl_mutex console_owner_lock irq_context: 0 rtnl_mutex console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &c->lock irq_context: 0 pernet_ops_rwsem ebt_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#622 irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex 
&bat_priv->tvlv.handler_list_lock &n->list_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &mm->mmap_lock stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#622 &rq->__lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#381 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1102 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1102 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem &xt[i].mutex irq_context: 0 pernet_ops_rwsem &nft_net->commit_mutex irq_context: 0 pernet_ops_rwsem netns_bpf_mutex irq_context: 0 pernet_ops_rwsem &x->wait#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex stock_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 pernet_ops_rwsem 
rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem krc.lock irq_context: 0 pernet_ops_rwsem ovs_mutex irq_context: 0 pernet_ops_rwsem ovs_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem ovs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem ovs_mutex (work_completion)(&data->gc_work) irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_node_0 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock &rq->__lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &lock->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock stock_lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1102 irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 &kcov->lock kcov_remote_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_wq[3] irq_context: 0 sb_lock &obj_hash[i].lock pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock irq_context: 0 &kcov->lock kcov_remote_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock 
irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 kn->active#46 &rq->__lock irq_context: 0 kn->active#46 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 &root->deactivate_waitq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
kn->active#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#12 &____s->seqcount#2 irq_context: 0 kn->active#12 &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex hcd->bandwidth_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 kn->active#23 &n->list_lock irq_context: 0 kn->active#23 &n->list_lock &c->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &meta->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) kfence_freelist_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &br->lock deferred_lock irq_context: 0 (wq_completion)rcu_gp 
(work_completion)(&rew->rew_work) &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &base->lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#657 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#3 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 kn->active#20 remove_cache_srcu irq_context: 0 kn->active#20 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#20 remove_cache_srcu &c->lock irq_context: 0 kn->active#20 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#20 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#20 remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &cfs_rq->removed.lock irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] 
&n->list_lock irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#382 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock init_task.mems_allowed_seq.seqcount irq_context: 0 rtnl_mutex crngs.lock base_crng.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 (wq_completion)wg-kex-wg0#296 irq_context: 0 cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex 
nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex key irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pcpu_lock irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex percpu_counters_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pcpu_lock stock_lock irq_context: 0 &hdev->lock &____s->seqcount#2 irq_context: 0 &hdev->lock &____s->seqcount irq_context: 0 rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &tbl->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex 
pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem ovs_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem ovs_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex net_rwsem &____s->seqcount irq_context: 0 rtnl_mutex net_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#176 irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq 
&(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#5 irq_context: 0 &sig->cred_guard_mutex &obj_hash[i].lock pool_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER fs_reclaim irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER &c->lock irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock &c->lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#386 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &____s->seqcount irq_context: 0 nfnl_subsys_ctnetlink_exp rlock-AF_NETLINK irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink_exp nlk_cb_mutex-NETFILTER &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex stock_lock irq_context: 0 pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#388 irq_context: 0 misc_mtx system_transition_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx system_transition_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock fs_reclaim &rq->__lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 probe_waitqueue.lock irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 cb_lock genl_mutex &nbd->config_lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex &br->lock (console_sem).lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#23 &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock stock_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#391 irq_context: 0 misc_mtx nfc_devlist_mutex &meta->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#391 irq_context: 0 cb_lock genl_mutex &nbd->config_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#307 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#391 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &meta->lock irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_wq[1] irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_node_0 irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex netpoll_srcu &rq->__lock irq_context: 0 rtnl_mutex netpoll_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 kfence_freelist_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 fill_pool_map-wait-type-override &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock pool_lock#2 irq_context: 0 &xa->xa_lock#22 &n->list_lock irq_context: 0 &xa->xa_lock#22 &n->list_lock &c->lock irq_context: 0 sb_writers#3 
&mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &xa->xa_lock#5 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &n->list_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &iopt->domains_rwsem irq_context: 0 &iopt->domains_rwsem &iopt->iova_rwsem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &obj_hash[i].lock 
irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &obj_hash[i].lock irq_context: 0 uevent_sock_mutex rcu_read_lock key irq_context: 0 uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock 
rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 &u->iolock &pcp->lock &zone->lock irq_context: 0 &u->iolock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock crngs.lock 
base_crng.lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex nf_conntrack_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &base->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock rcu_node_0 
irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rq->__lock irq_context: 0 system_transition_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 drm_unplug_srcu irq_context: 0 &dev->master_mutex irq_context: 0 &dev->master_mutex 
fs_reclaim irq_context: 0 &dev->master_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->master_mutex &c->lock irq_context: 0 &dev->master_mutex &____s->seqcount irq_context: 0 &dev->master_mutex pool_lock#2 irq_context: 0 &dev->master_mutex &file->master_lookup_lock irq_context: 0 &dev->event_lock irq_context: 0 &dev->master_mutex &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 system_transition_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_read_lock rcu_node_0 irq_context: 0 system_transition_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 &ctx->uring_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#44 irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_node_0 irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 
nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ctx->uring_lock rcu_node_0 irq_context: 0 &ctx->uring_lock &rcu_state.expedited_wq irq_context: 0 &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#664 irq_context: 0 nf_sockopt_mutex stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_node_0 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &lock->wait_lock irq_context: 0 nf_sockopt_mutex key irq_context: 0 nf_sockopt_mutex pcpu_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 nf_sockopt_mutex percpu_counters_lock irq_context: 0 nf_sockopt_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle 
mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 rtnl_mutex dpm_list_mtx rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex dpm_list_mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#44 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wq->mutex key irq_context: 0 rtnl_mutex &wq->mutex pcpu_lock irq_context: 0 rtnl_mutex &wq->mutex percpu_counters_lock irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#3 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &meta->lock irq_context: 0 (wq_completion)nfc40_nci_rx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do 
{ const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1087 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &cfs_rq->removed.lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 nf_nat_proto_mutex &rq->__lock irq_context: 0 nf_nat_proto_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock pool_lock irq_context: 0 &xt[i].mutex batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &base->lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override &c->lock irq_context: 0 bdev_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: 0 &nbd->config_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &nbd->config_lock &q->mq_freeze_lock irq_context: 0 &nbd->config_lock &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &nbd->config_lock &q->mq_freeze_lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &nbd->config_lock &q->mq_freeze_lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 &nbd->config_lock &q->mq_freeze_lock &rq->__lock irq_context: 0 &nbd->config_lock set->srcu irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem ovs_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ovs_mutex net_rwsem irq_context: 0 pernet_ops_rwsem (work_completion)(&(&ovs_net->masks_rebalance)->work) irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem base_crng.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem (console_sem).lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_lock console_srcu console_owner_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_lock console_srcu console_owner irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_lock console_srcu console_owner &port_lock_key irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock fill_pool_map-wait-type-override pool_lock 
irq_context: 0 misc_mtx system_transition_mutex fs_reclaim irq_context: 0 misc_mtx system_transition_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx system_transition_mutex pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx system_transition_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex (console_sem).lock irq_context: 0 system_transition_mutex &obj_hash[i].lock irq_context: 0 system_transition_mutex pool_lock#2 irq_context: 0 system_transition_mutex (console_sem).lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem batched_entropy_u8.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem batched_entropy_u8.lock crngs.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem (work_completion)(&ovs_net->dp_notify_work) irq_context: 0 pernet_ops_rwsem &srv->idr_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 &data->open_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock 
&pool->lock (worker)->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#673 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 misc_mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET stock_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock 
(worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem dev_base_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss stock_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss key irq_context: 0 &sig->cred_guard_mutex tomoyo_ss pcpu_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss pcpu_lock stock_lock irq_context: 0 sb_writers#4 oom_adj_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &nt->cluster_scope_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC k-clock-AF_TIPC irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex 
rcu_node_0 irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#4 &n->list_lock irq_context: 0 sb_writers#4 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex.wait_lock irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &sb->s_type->i_lock_key#20 irq_context: 0 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount#2 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#398 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &n->list_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 irq_context: 0 
(wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) rcu_node_0 irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &mapping->i_mmap_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 pernet_ops_rwsem (work_completion)(&tn->work) irq_context: 0 &acomp_ctx->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#30 &rq->__lock 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#398 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#185 irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 pernet_ops_rwsem &tn->nametbl_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#400 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#400 irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#400 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex ehci_cf_port_reset_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#408 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#411 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#411 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#412 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 kcov_remote_lock irq_context: 0 rcu_read_lock &wb->work_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 rcu_read_lock &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem quarantine_lock irq_context: 0 rcu_read_lock &wb->work_lock &base->lock irq_context: 0 rcu_read_lock &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &wb->work_lock rcu_read_lock &pool->lock irq_context: 0 rcu_read_lock &wb->work_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &wb->work_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &fq->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->active_txq_lock[i] irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (work_completion)(&sta->drv_deliver_wk) irq_context: 0 rcu_read_lock &wb->work_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_read_lock &wb->work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock 
&wb->work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pgdat->reclaim_wait[i] irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 &sma->sems[i].lock irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 pernet_ops_rwsem (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#11 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#11 &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 &vma->vm_lock->lock &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &vma->vm_lock->lock lock#4 lock#11 irq_context: 0 &vma->vm_lock->lock lock#4 lock#11 &lruvec->lru_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu 
fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem &ht->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &ht->mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &ht->mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem (work_completion)(&(&c->work)->work) irq_context: 0 pernet_ops_rwsem napi_hash_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-clock-AF_INET6 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rq->__lock cid_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock 
fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &root->kernfs_rwsem quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_owner irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sb_internal jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: softirq 
(&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx &x->wait#2 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#199 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#199 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 br_ioctl_mutex rtnl_mutex.wait_lock irq_context: 0 br_ioctl_mutex &p->pi_lock irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 &n->list_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &ei->i_prealloc_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &pa->pa_lock#2 irq_context: 0 &vma->vm_lock->lock 
mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex j1939_netdev_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex j1939_netdev_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) purge_vmap_area_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg1#299 irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 misc_mtx nfc_index_ida.xa_lock &c->lock irq_context: 0 misc_mtx nfc_index_ida.xa_lock &n->list_lock irq_context: 0 misc_mtx nfc_index_ida.xa_lock &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &mm->mmap_lock fs_reclaim rcu_node_0 irq_context: 0 misc_mtx nfc_index_ida.xa_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &k->k_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK rcu_node_0 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 kn->active#47 &n->list_lock irq_context: 0 kn->active#47 &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock pcpu_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem (wq_completion)krdsd irq_context: 0 pernet_ops_rwsem (work_completion)(&rtn->rds_tcp_accept_w) irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &icsk->icsk_accept_queue.rskq_lock#2 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex subsys mutex#20 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock 
&q->queue_lock percpu_counters_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &blkcg->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &blkcg->lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_raw_lock irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#5 &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#5 pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock &pa->pa_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem key#3 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 (wq_completion)events free_ipc_work stock_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock key#11 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &retval->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 stock_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem &rq_wait->wait 
irq_context: 0 &sbi->s_writepages_rwsem &__ctx->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ei->i_data_sem &ei->i_prealloc_lock &pa->pa_lock#2 irq_context: 0 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &ei->i_data_sem pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem key#3 irq_context: 0 sb_internal jbd2_handle &ei->i_prealloc_lock irq_context: 0 sb_internal jbd2_handle &journal->j_list_lock irq_context: 0 sb_internal jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &ep->mtx &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &sbi->s_writepages_rwsem &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &retval->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &nvmeq->sq_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock 
&rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock key#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &lruvec->lru_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &____s->seqcount irq_context: 0 kn->active#46 remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 kn->active#46 remove_cache_srcu quarantine_lock irq_context: 0 &disk->open_mutex stock_lock irq_context: 0 &disk->open_mutex key irq_context: 0 &disk->open_mutex pcpu_lock irq_context: 0 &disk->open_mutex percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc4_nci_cmd_wq#50 irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#51 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 kn->active#46 remove_cache_srcu &c->lock irq_context: 0 kn->active#46 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#46 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#46 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#46 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex stock_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex key irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex pcpu_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 rtnl_mutex net_rwsem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_callback key#28 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock 
rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#290 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock lock#4 lock#11 irq_context: 0 &mm->mmap_lock lock#4 lock#11 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock lock#4 lock#11 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_TIPC irq_context: 0 sk_lock-AF_TIPC &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 sk_lock-AF_TIPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_log_mutex &rq->__lock irq_context: 0 sk_lock-AF_TIPC slock-AF_TIPC irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 slock-AF_TIPC irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC irq_context: 0 rtnl_mutex net_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u8.lock irq_context: 0 &lo->lo_mutex &n->list_lock irq_context: 0 &lo->lo_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 quarantine_lock 
irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#591 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#591 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_log_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &ids->rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#591 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#590 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &nbd->config_lock percpu_ref_switch_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET key irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 sk_lock-AF_INET 
remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex kn->active#50 &rq->__lock irq_context: 0 sb_writers#9 &of->mutex kn->active#50 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock 
fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock stock_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock stock_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rds_tcp_conn_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem sysctl_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem sysctl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem sysctl_lock krc.lock irq_context: 0 pernet_ops_rwsem loop_conns_lock irq_context: 0 pernet_ops_rwsem (wq_completion)l2tp irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex &x->wait#24 irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 pernet_ops_rwsem rtnl_mutex bpf_devs_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &in_dev->mc_tomb_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex class irq_context: 0 pernet_ops_rwsem rtnl_mutex (&tbl->proxy_timer) irq_context: 0 pernet_ops_rwsem rtnl_mutex &base->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &ul->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &net->xdp.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex mirred_list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &ul->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &ent->pde_unload_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &ndev->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &ndev->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_report_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex krc.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock irq_context: 0 
pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnn->routes.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex target_list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_SIT irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex deferred_probe_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex device_links_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex.wait_lock irq_context: 0 pernet_ops_rwsem (&rxnet->peer_keepalive_timer) irq_context: 0 pernet_ops_rwsem (work_completion)(&rxnet->peer_keepalive_work) irq_context: 0 pernet_ops_rwsem (&rxnet->service_conn_reap_timer) irq_context: 0 pernet_ops_rwsem &x->wait#10 irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_NONE irq_context: 0 pernet_ops_rwsem lweventlist_lock irq_context: 0 pernet_ops_rwsem &dir->lock#2 &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &dir->lock#2 pool_lock#2 irq_context: 0 pernet_ops_rwsem netdev_unregistering_wq.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL6 irq_context: 0 pernet_ops_rwsem rtnl_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 pernet_ops_rwsem &ht->mutex &rq->__lock irq_context: 0 pernet_ops_rwsem &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_ETHER irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock 
fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC slock-AF_TIPC irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC &obj_hash[i].lock irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC rcu_read_lock rhashtable_bucket irq_context: 0 &sighand->siglock &n->list_lock irq_context: 0 &sighand->siglock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex key irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 lock pidmap_lock batched_entropy_u8.lock irq_context: 0 lock pidmap_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &type->i_mutex_dir_key#7 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key#7 &dentry->d_lock 
&wq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#6 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#6 &xa->xa_lock#5 irq_context: 0 &type->i_mutex_dir_key#6 &xa->xa_lock#5 &c->lock irq_context: 0 &type->i_mutex_dir_key#6 &xa->xa_lock#5 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#6 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#6 stock_lock irq_context: 0 &type->i_mutex_dir_key#6 &dentry->d_lock &wq irq_context: 0 &type->i_mutex_dir_key#2 &xa->xa_lock#5 irq_context: 0 &type->i_mutex_dir_key#2 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#2 stock_lock irq_context: 0 (wq_completion)events_long &rq->__lock irq_context: 0 (wq_completion)events_long &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_osf &rq->__lock irq_context: 0 nfnl_subsys_osf &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_sockopt_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem bit_wait_table + i irq_context: 0 &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events free_ipc_work pool_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex remove_cache_srcu &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_IPGRE irq_context: 0 pernet_ops_rwsem &fn->fou_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_node_0 irq_context: 0 rtnl_mutex &ndev->lock batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 
irq_context: 0 pernet_ops_rwsem ipvs->sync_mutex irq_context: 0 pernet_ops_rwsem hwsim_radio_lock irq_context: 0 pernet_ops_rwsem rdma_nets_rwsem irq_context: 0 pernet_ops_rwsem rdma_nets_rwsem rdma_nets.xa_lock irq_context: 0 pernet_ops_rwsem k-clock-AF_NETLINK irq_context: 0 pernet_ops_rwsem &nlk->wait irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 pernet_ops_rwsem rtnl_mutex _xmit_LOOPBACK irq_context: 0 pernet_ops_rwsem &hn->hn_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq#2 irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &x->wait#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock kfence_freelist_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &this->info_list_lock irq_context: 0 pernet_ops_rwsem &pnettable->lock irq_context: 0 pernet_ops_rwsem &pnetids_ndev->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6/1 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock base_crng.lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock &n->list_lock &c->lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock rcu_node_0 irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink_exp rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 pfkey_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 pfkey_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_KEY irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6/1 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6/1 rlock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6/1 &list->lock#19 irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &sctp_ep_hashtable[i].lock irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 
&obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 pool_lock#2 irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 k-clock-AF_INET6 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &meta->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_wq[0] irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&ns->work) sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC clock-AF_TIPC irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_SIT#2 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fib_info_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex fib_info_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &ndev->lock &base->lock irq_context: 0 rtnl_mutex &ndev->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem 
&ipvlan->addrs_lock &n->list_lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock &n->list_lock &c->lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &____s->seqcount#2 irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#175 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &dev->mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &obj_hash[i].lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock kfence_freelist_lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sbinfo->stat_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xa->xa_lock#5 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &s->s_inode_list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 batched_entropy_u32.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xattrs->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xattrs->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 smack_known_lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#420 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#592 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 listen_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq 
&p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_VSOCK &____s->seqcount#2 irq_context: 0 sk_lock-AF_VSOCK &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_VSOCK &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_VSOCK &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 &anon_vma->rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 system_transition_mutex device_hotplug_lock rcu_node_0 irq_context: 0 system_transition_mutex device_hotplug_lock &rcu_state.expedited_wq irq_context: 0 system_transition_mutex device_hotplug_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 system_transition_mutex device_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 system_transition_mutex device_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#175 irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#420 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#212 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#212 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#420 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#13 irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &c->lock irq_context: 
0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#207 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#56 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#211 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#437 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#444 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#444 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#220 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#452 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#454 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#454 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#455 irq_context: 0 tasklist_lock &sighand->siglock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#455 irq_context: 0 &sig->cred_guard_mutex stock_lock irq_context: 0 &sig->cred_guard_mutex pcpu_lock stock_lock irq_context: softirq (&peer->timer_retransmit_handshake) &list->lock#14 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#467 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#13 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex irq_context: softirq rcu_read_lock hwsim_radio_lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#5 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sb_writers#5 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override rcu_node_0 irq_context: 0 tasklist_lock &sighand->siglock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#467 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#23 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 
kfence_freelist_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &meta->lock irq_context: 0 &xt[i].mutex remove_cache_srcu kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#469 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#477 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#477 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#23 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#68 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#248 irq_context: 0 kn->active#22 &rq->__lock irq_context: 0 kn->active#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &net->ipv4.ra_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#486 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#249 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#249 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#70 &rq->__lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#490 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#71 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#71 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#251 irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#251 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] fs_reclaim 
mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &journal->j_list_lock &meta->lock irq_context: 0 &journal->j_list_lock kfence_freelist_lock irq_context: 0 sb_internal jbd2_handle &____s->seqcount irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &nbd->config_lock (console_sem).lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)gid-cache-wq rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfnl_subsys_cthelper (console_sem).lock irq_context: 0 nfnl_subsys_cthelper console_lock console_srcu console_owner_lock irq_context: 0 nfnl_subsys_cthelper console_lock console_srcu console_owner irq_context: 0 nfnl_subsys_cthelper console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 nfnl_subsys_cthelper console_lock console_srcu console_owner console_owner_lock irq_context: 0 nfnl_subsys_cthelper &rq->__lock irq_context: 0 nfnl_subsys_cthelper &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#251 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 kn->active#49 &rq->__lock irq_context: 0 bpf_devs_lock remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 bpf_devs_lock remove_cache_srcu quarantine_lock irq_context: 0 bpf_devs_lock remove_cache_srcu &c->lock irq_context: 0 bpf_devs_lock remove_cache_srcu &n->list_lock irq_context: 0 sched_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void 
*)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &nbd->config_lock console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex pcpu_lock irq_context: 0 &nbd->config_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#492 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#495 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &nbd->config_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#494 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#496 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) rcu_read_lock &pool->lock irq_context: 0 rcu_read_lock &sighand->siglock &____s->seqcount#2 irq_context: 0 &type->s_umount_key#47 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#254 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#498 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem key irq_context: 0 pernet_ops_rwsem &sn->gssp_lock irq_context: 0 pernet_ops_rwsem &cd->hash_lock irq_context: 0 pernet_ops_rwsem cache_list_lock &cd->hash_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#592 &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#76 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET k-clock-AF_INET irq_context: 0 pernet_ops_rwsem (&net->can.stattimer) irq_context: 0 pernet_ops_rwsem pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem xfrm_state_gc_work irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &net->xfrm.xfrm_state_lock irq_context: 0 pernet_ops_rwsem (work_completion)(&(&net->ipv6.addr_chk_work)->work) irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock 
pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET#2 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET#2 pool_lock#2 irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock irq_context: 0 pernet_ops_rwsem ip6_fl_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &net->rules_mod_lock irq_context: 0 pernet_ops_rwsem (&net->ipv6.ip6_fib_timer) irq_context: 0 pernet_ops_rwsem rtnl_mutex (&mrt->ipmr_expire_timer) irq_context: 0 pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) irq_context: 0 pernet_ops_rwsem rtnl_mutex &ht->mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &ht->mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &ht->mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem __ip_vs_mutex irq_context: 0 pernet_ops_rwsem (&ipvs->dest_trash_timer) irq_context: 0 pernet_ops_rwsem (work_completion)(&(&ipvs->expire_nodest_conn_work)->work) irq_context: 0 pernet_ops_rwsem (work_completion)(&(&ipvs->defense_work)->work) irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem (work_completion)(&(&ipvs->est_reload_work)->work) irq_context: 0 pernet_ops_rwsem nfnl_subsys_ipset irq_context: 0 pernet_ops_rwsem recent_lock irq_context: 0 pernet_ops_rwsem hashlimit_mutex irq_context: 0 pernet_ops_rwsem trans_gc_work irq_context: 0 pernet_ops_rwsem (work_completion)(&(&cnet->ecache.dwork)->work) irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem kernfs_idr_lock irq_context: 0 pernet_ops_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem tcp_metrics_lock irq_context: 0 pernet_ops_rwsem k-clock-AF_INET irq_context: 0 pernet_ops_rwsem (work_completion)(&net->xfrm.policy_hash_work) irq_context: 0 pernet_ops_rwsem &net->xfrm.xfrm_policy_lock irq_context: 0 pernet_ops_rwsem (work_completion)(&net->xfrm.state_hash_work) irq_context: 0 pernet_ops_rwsem &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem genl_sk_destructing_waitq.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 nfnl_subsys_cthelper &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &base->lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pool_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_cthelper &obj_hash[i].lock irq_context: 0 nfnl_subsys_cthelper &lock->wait_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem 
rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#76 &rq->__lock irq_context: 0 &bdev->bd_holder_lock irq_context: 0 &lo->lo_mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &conn->lock irq_context: 0 &nbd->config_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu &obj_hash[i].lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 &q->mq_freeze_wq irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock irq_context: 0 &lo->lo_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &lo->lo_mutex pool_lock#2 irq_context: 0 &lo->lo_mutex pcpu_alloc_mutex irq_context: 0 &lo->lo_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &lo->lo_mutex cpu_hotplug_lock irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &lo->lo_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &lo->lo_mutex wq_pool_mutex irq_context: 0 &lo->lo_mutex wq_pool_mutex &wq->mutex irq_context: 0 &lo->lo_mutex wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &lo->lo_mutex &bdev->bd_holder_lock irq_context: 0 &lo->lo_mutex &c->lock irq_context: 0 &lo->lo_mutex &____s->seqcount#2 irq_context: 0 &lo->lo_mutex &____s->seqcount irq_context: 0 &lo->lo_mutex lock irq_context: 0 &lo->lo_mutex lock kernfs_idr_lock irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex &bdev->bd_size_lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_lock irq_context: 0 &lo->lo_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &lo->lo_mutex &sem->wait_lock irq_context: 0 &lo->lo_mutex &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx 
&root->kernfs_rwsem &sem->wait_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &lo->lo_mutex lock kernfs_idr_lock &c->lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &base->lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &lo->lo_mutex &p->pi_lock &rq->__lock irq_context: 0 &lo->lo_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#76 irq_context: 0 vmap_purge_lock free_vmap_area_lock &meta->lock irq_context: 0 vmap_purge_lock free_vmap_area_lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#505 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#505 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#258 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 nfc_devlist_mutex deferred_probe_mutex &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &sem->wait_lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#296 irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount#2 irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#513 irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &p->pi_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#519 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &lo->lo_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex &q->mq_freeze_lock irq_context: 0 &lo->lo_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &lo->lo_mutex &q->mq_freeze_lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &lo->lo_mutex &q->mq_freeze_lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 &lo->lo_mutex percpu_ref_switch_lock irq_context: 0 &lo->lo_mutex &q->mq_freeze_wq irq_context: 0 loop_validate_mutex &lo->lo_mutex &lock->wait_lock irq_context: 0 loop_validate_mutex loop_validate_mutex.wait_lock irq_context: 0 &lo->lo_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 loop_validate_mutex.wait_lock irq_context: 0 loop_validate_mutex stock_lock irq_context: 0 loop_validate_mutex &obj_hash[i].lock irq_context: 0 loop_validate_mutex key irq_context: 0 loop_validate_mutex pcpu_lock irq_context: 0 loop_validate_mutex percpu_counters_lock irq_context: 0 loop_validate_mutex pcpu_lock stock_lock irq_context: 0 loop_validate_mutex &____s->seqcount irq_context: 0 loop_validate_mutex pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 
&fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#4 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 ebt_mutex &n->list_lock irq_context: 0 ebt_mutex &n->list_lock &c->lock irq_context: 0 pcpu_alloc_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &p->lock &of->mutex kn->active#4 stock_lock irq_context: 0 &p->lock &of->mutex kn->active#4 key irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#519 irq_context: 0 &p->lock &of->mutex kn->active#4 pcpu_lock irq_context: 0 &p->lock &of->mutex 
kn->active#4 percpu_counters_lock irq_context: 0 &p->lock &of->mutex kn->active#4 pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &journal->j_list_lock key#13 irq_context: 0 &disk->open_mutex &lo->lo_lock irq_context: 0 &disk->open_mutex &bdev->bd_size_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem irq_context: 0 &disk->open_mutex &root->kernfs_rwsem irq_context: 0 &disk->open_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &disk->open_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim rcu_node_0 irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex pool_lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_node_0 irq_context: 0 &disk->open_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 irq_context: 0 &disk->open_mutex kernfs_idr_lock irq_context: 0 &disk->open_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 &disk->open_mutex uevent_sock_mutex irq_context: 0 &disk->open_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &disk->open_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &disk->open_mutex 
uevent_sock_mutex nl_table_lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex flowtable_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex flowtable_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx batched_entropy_u8.lock crngs.lock irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem 
&meta->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem kfence_freelist_lock irq_context: 0 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &nft_net->commit_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#10 &n->list_lock irq_context: 0 sb_writers#10 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu kfence_freelist_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: softirq (&p->timer) &br->multicast_lock &n->list_lock irq_context: softirq (&p->timer) &br->multicast_lock &n->list_lock &c->lock irq_context: softirq (&mp->timer) &br->multicast_lock &n->list_lock irq_context: softirq (&mp->timer) &br->multicast_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u8.lock 
irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex kfence_freelist_lock irq_context: 0 sk_lock-AF_VSOCK &list->lock#21 irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK &ei->socket.wq.wait irq_context: 0 (wq_completion)vsock-loopback irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &list->lock#21 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) vsock_table_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) fs_reclaim irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 sb_writers#10 &rq->__lock irq_context: 0 sb_writers#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 &cfs_rq->removed.lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex gdp_mutex &n->list_lock irq_context: 0 rtnl_mutex gdp_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &obj_hash[i].lock irq_context: 0 &net->packet.sklist_lock rcu_node_0 irq_context: 0 &net->packet.sklist_lock &rcu_state.expedited_wq irq_context: 0 &net->packet.sklist_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &net->packet.sklist_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &net->packet.sklist_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->packet.sklist_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[0] irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK slock-AF_VSOCK irq_context: 0 (wq_completion)vsock-loopback 
(work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK &vvs->tx_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK &obj_hash[i].lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK pool_lock#2 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) slock-AF_VSOCK irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK slock-AF_VSOCK &sk->sk_lock.wq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &c->lock irq_context: 0 sb_writers#10 &obj_hash[i].lock irq_context: 0 sb_writers#10 pool_lock#2 irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events free_ipc_work &____s->seqcount irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &disk->open_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &disk->open_mutex &bdev->bd_holder_lock irq_context: 0 &disk->open_mutex &n->list_lock irq_context: 0 
&disk->open_mutex &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)events_unbound (reaper_work).work quarantine_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 
irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) slock-AF_VSOCK &sk->sk_lock.wq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) slock-AF_VSOCK &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &n->list_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &sem->wait_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pcpu_lock stock_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock 
rcu_read_lock rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 misc_mtx wq_mayday_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 
jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override rcu_node_0 irq_context: 0 cb_lock stock_lock irq_context: 0 cb_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &disk->open_mutex &nbd->config_lock console_owner_lock irq_context: 0 &disk->open_mutex &nbd->config_lock console_owner irq_context: 0 &nbd->config_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &nbd->config_lock &q->mq_freeze_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#925 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#925 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#592 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#592 irq_context: 0 &nbd->config_lock &rq->__lock irq_context: softirq 
(&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#271 irq_context: 0 sb_writers#5 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bond0#169 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#622 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &x->wait#2 irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#207 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 
remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#5 &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#5 kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &meta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) kfence_freelist_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 pcpu_alloc_mutex rcu_node_0 irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 &n->list_lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 bit_wait_table + i irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_NFC &local->raw_sockets.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_NFC irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 rcu_read_lock (console_sem).lock irq_context: 0 rcu_read_lock console_lock console_srcu console_owner_lock irq_context: 0 rcu_read_lock console_lock console_srcu console_owner irq_context: 0 rcu_read_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 rcu_read_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock 
fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_NFC slock-AF_NFC irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_NFC &local->raw_sockets.lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_NFC irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sched_map-wait-type-override stock_lock irq_context: 0 sched_map-wait-type-override key irq_context: 0 sched_map-wait-type-override pcpu_lock irq_context: 0 sched_map-wait-type-override percpu_counters_lock irq_context: 0 sched_map-wait-type-override pcpu_lock stock_lock irq_context: 0 &lo->lo_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock 
rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#295 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock pcpu_lock irq_context: 0 cb_lock percpu_counters_lock irq_context: 0 cb_lock pcpu_lock stock_lock irq_context: 0 &nbd->config_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &nbd->config_lock rcu_read_lock &rq->__lock irq_context: 0 &nbd->config_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &nbd->config_lock fs_reclaim irq_context: 0 &nbd->config_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &nbd->config_lock &c->lock irq_context: 0 &nbd->config_lock pool_lock#2 
irq_context: 0 &nbd->config_lock &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock &u->lock irq_context: 0 &disk->open_mutex &nbd->config_lock (console_sem).lock irq_context: 0 &disk->open_mutex &nbd->config_lock console_lock console_srcu console_owner_lock irq_context: 0 &disk->open_mutex &nbd->config_lock console_lock console_srcu console_owner irq_context: 0 &disk->open_mutex &nbd->config_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &disk->open_mutex &nbd->config_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &nbd->config_lock &n->list_lock irq_context: 0 &nbd->config_lock &n->list_lock &c->lock irq_context: 0 &nbd->config_lock remove_cache_srcu irq_context: 0 &nbd->config_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &nbd->config_lock remove_cache_srcu &c->lock irq_context: 0 &nbd->config_lock remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim pool_lock#2 irq_context: 0 &hdev->req_lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount#2 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock krc.lock &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock krc.lock &base->lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#317 irq_context: 0 rtnl_mutex &wq->mutex pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: softirq 
rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC stock_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC key irq_context: 0 &disk->open_mutex &nbd->config_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 system_transition_mutex device_hotplug_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#521 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#523 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#274 irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#317 irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#592 irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &base->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fs_reclaim stock_lock irq_context: 0 rtnl_mutex fs_reclaim key irq_context: 0 rtnl_mutex fs_reclaim pcpu_lock irq_context: 0 rtnl_mutex fs_reclaim percpu_counters_lock irq_context: 0 rtnl_mutex fs_reclaim pcpu_lock stock_lock irq_context: 0 rtnl_mutex fs_reclaim &____s->seqcount irq_context: 0 &nbd->config_lock remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#278 irq_context: 0 &type->s_umount_key#30 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 shmem_swaplist_mutex &xa->xa_lock#23 key#29 irq_context: 0 &type->s_umount_key#43 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#43 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#278 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#282 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#591 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex percpu_counters_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 rcu_node_0 irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex fs_reclaim &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound deferred_probe_work &rq->__lock irq_context: 0 (wq_completion)events_unbound deferred_probe_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 &lo->lo_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: 
&qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex stock_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &nbd->config_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &nbd->config_lock &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock cpu_asid_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &n->list_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &n->list_lock &c->lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &rq->__lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock kernfs_rename_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 prog_idr_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss stock_lock irq_context: 0 &p->lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &nft_net->commit_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &c->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_ROSE fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock (console_sem).lock irq_context: 0 cb_lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock 
&obj_hash[i].lock irq_context: 0 cb_lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 sb_internal jbd2_handle &rq->__lock cpu_asid_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 
(wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock (console_sem).lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock key irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#282 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#534 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock pcpu_lock irq_context: softirq 
&(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock rcu_read_lock (console_sem).lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock rcu_read_lock kernfs_pr_cont_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#285 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#622 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem lock#4 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#285 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock percpu_counters_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#43 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle lock#4 irq_context: 0 sb_writers#3 sb_internal jbd2_handle lock#4 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &dir->lock#2 &meta->lock irq_context: 0 &dir->lock#2 kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock quarantine_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
fill_pool_map-wait-type-override stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override percpu_counters_lock irq_context: 0 &anon_vma->rwsem rcu_node_0 irq_context: 0 &anon_vma->rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#287 irq_context: 0 &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#50 &____s->seqcount#2 irq_context: 0 kn->active#50 &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &data->open_mutex gdp_mutex &rq->__lock irq_context: 0 &data->open_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#287 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#538 irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (work_completion)(&local->rx_work) &rq->__lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 irq_context: 0 (work_completion)(&local->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#293 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock pcpu_lock stock_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 key#29 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock cgroup_rstat_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock irq_context: 0 system_transition_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 misc_mtx nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock cpu_asid_lock irq_context: softirq rcu_callback put_task_map-wait-type-override stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock quarantine_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &base->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 irq_context: 0 &hdev->req_lock quarantine_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)events (linkwatch_work).work &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 namespace_sem batched_entropy_u8.lock 
irq_context: 0 namespace_sem kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex key irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex pcpu_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim rcu_node_0 irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &nbd->config_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex &nbd->config_lock (console_sem).lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_owner_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_owner irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_lock console_srcu console_owner irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock genl_mutex &nbd->config_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 
irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: hardirq|softirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 cb_lock genl_mutex &nbd->config_lock &bdev->bd_size_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &q->queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &rq->__lock cpu_asid_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock quarantine_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 vmap_purge_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 vmap_purge_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 vmap_purge_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 vmap_purge_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 &sighand->siglock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock 
&p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_node_0 irq_context: 0 &sighand->siglock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX stock_lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &__ctx->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem batched_entropy_u8.lock irq_context: 0 &sbi->s_writepages_rwsem kfence_freelist_lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion 
(work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(sdp, lock) irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock key#28 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; 
__ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock set->srcu irq_context: 0 cb_lock genl_mutex &nbd->config_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu 
rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 pcpu_alloc_mutex fs_reclaim &rq->__lock irq_context: 0 pcpu_alloc_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 &dev->mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock key irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 kfence_freelist_lock irq_context: softirq (&timer) rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&timer) rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex 
rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock &pcp->lock &zone->lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#551 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#556 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock cpu_asid_lock irq_context: 0 &data->open_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &data->open_mutex subsys mutex#74 &rq->__lock irq_context: 0 &data->open_mutex leds_list_lock &rq->__lock irq_context: 0 &data->open_mutex 
rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#21/1 &xa->xa_lock#5 &n->list_lock irq_context: 0 &type->s_umount_key#21/1 &xa->xa_lock#5 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#21/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex rcu_read_lock &rq->__lock irq_context: 0 &lo->lo_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex uevent_sock_mutex &c->lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 
irq_context: 0 kn->active#47 remove_cache_srcu &c->lock irq_context: 0 kn->active#47 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#47 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#47 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#47 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex stock_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex key irq_context: 0 cb_lock genl_mutex rfkill_global_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex percpu_counters_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 kn->active#51 &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 kn->active#51 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex &n->list_lock irq_context: 0 &dev->master_mutex &n->list_lock &c->lock irq_context: 0 &dev->master_mutex &rq->__lock irq_context: 0 &dev->master_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#176 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#173 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 sb_writers#10 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#10 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 
&mm->mmap_lock remove_cache_srcu &meta->lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock pcpu_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &x->wait#3 irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock rcu_read_lock kernfs_pr_cont_lock kernfs_rename_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#174 irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock rcu_read_lock kernfs_pr_cont_lock (console_sem).lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#39 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#39 irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock batched_entropy_u8.lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock rcu_read_lock cgroup_file_kn_lock irq_context: 0 cb_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 cb_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock key irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pcpu_lock irq_context: 0 &dentry->d_lock rcu_read_lock &p->pi_lock irq_context: 0 &dentry->d_lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &dentry->d_lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock kfence_freelist_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 &sighand->siglock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock set->srcu irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 &pipe->mutex/1 &mm->mmap_lock percpu_counters_lock irq_context: 0 
cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 krc.lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#39 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#38 irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC (console_sem).lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC console_lock console_srcu console_owner_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#313 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC console_lock console_srcu console_owner irq_context: 0 cb_lock nlk_cb_mutex-GENERIC console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC console_lock console_srcu console_owner console_owner_lock irq_context: 0 sk_lock-AF_ROSE rose_node_list_lock irq_context: 0 sk_lock-AF_ROSE &rq->__lock irq_context: 0 sk_lock-AF_ROSE &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 
0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &nft_net->commit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET stock_lock irq_context: 0 sk_lock-AF_PACKET key irq_context: 0 sk_lock-AF_PACKET pcpu_lock irq_context: 0 sk_lock-AF_PACKET percpu_counters_lock irq_context: 0 sk_lock-AF_PACKET pcpu_lock stock_lock irq_context: 0 sk_lock-AF_PACKET slock-AF_PACKET &sk->sk_lock.wq irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq &p->pi_lock irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 slock-AF_PACKET &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_PACKET &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&peer->timer_send_keepalive) &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &data->open_mutex fs_reclaim &rq->__lock irq_context: 0 &data->open_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#50 remove_cache_srcu &c->lock irq_context: 0 kn->active#50 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#50 remove_cache_srcu rcu_node_0 irq_context: 0 kn->active#50 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 kn->active#50 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 kn->active#50 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 kn->active#50 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 kn->active#50 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#50 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#50 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#50 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#50 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex rcu_node_0 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)nfc39_nci_tx_wq#3 irq_context: 0 purge_vmap_area_lock &meta->lock irq_context: 0 purge_vmap_area_lock kfence_freelist_lock irq_context: 0 rcu_read_lock &zone->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &zone->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex 
&rdev->wiphy.mtx uevent_sock_mutex kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &meta->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &p->pi_lock irq_context: 0 &x->wait#28 irq_context: 0 &x->wait#28 &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem quarantine_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#28 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#28 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock console_owner_lock irq_context: 0 rcu_read_lock console_owner irq_context: 0 &type->i_mutex_dir_key#6 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 quarantine_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#557 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#557 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#92 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &meta->lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 sk_lock-AF_RDS irq_context: 0 sk_lock-AF_RDS &rq->__lock irq_context: 0 sk_lock-AF_RDS &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_RDS slock-AF_RDS irq_context: 0 sk_lock-AF_RDS &mm->mmap_lock irq_context: 0 sk_lock-AF_RDS rds_trans_sem irq_context: 0 slock-AF_RDS irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 
misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock sb_writers#5 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 stock_lock irq_context: 0 &type->i_mutex_dir_key#4 key irq_context: 0 &type->i_mutex_dir_key#4 pcpu_lock irq_context: 0 &type->i_mutex_dir_key#4 percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 &sqd->lock &lock->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#38 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#38 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ids->rwsem rcu_read_lock &new->lock#2 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dum_hcd->dum->lock hcd_urb_list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dum_hcd->dum->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dum_hcd->dum->lock &base->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dum_hcd->dum->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &x->wait#19 irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &base->lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 key irq_context: 0 &sb->s_type->i_mutex_key#9 pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#9 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#9 pcpu_lock stock_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex (&timer.timer) irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#6 rcu_node_0 irq_context: 0 sb_writers#6 &rcu_state.expedited_wq irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle percpu_counters_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) 
cgroup_threadgroup_rwsem rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &ids->rwsem remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#3 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &x->wait#9 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&dev->mutex &dev->mutex minor_rwsem#2 &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &k->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
jbd2_handle smack_known_lock stock_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pcpu_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#3 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 
sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mount_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&net->ipv6.addr_chk_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex &k->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 fs_reclaim irq_context: 0 &type->i_mutex_dir_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &idev->mc_lock rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#3 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 bpf_devs_lock irq_context: 0 bpf_devs_lock fs_reclaim irq_context: 0 bpf_devs_lock fs_reclaim &rq->__lock irq_context: 0 bpf_devs_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 bpf_devs_lock pool_lock#2 irq_context: 0 bpf_devs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 bpf_devs_lock &c->lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu quarantine_lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu &c->lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 proto_tab_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 proto_tab_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 proto_tab_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rcu_read_lock &new->lock#2 &sma->sems[i].lock irq_context: 0 nfnl_subsys_ctnetlink remove_cache_srcu pool_lock#2 irq_context: 0 &fsnotify_mark_srcu &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET6 &rq->__lock cpu_asid_lock irq_context: 0 proto_tab_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 proto_tab_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fsnotify_mark_srcu &rq->__lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rcu_state.expedited_wq 
&p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 rtnl_mutex dev_addr_sem &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&net->ipv6.addr_chk_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) 
fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#560 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1074 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex kfence_freelist_lock irq_context: 0 cb_lock &rcu_state.expedited_wq irq_context: 0 cb_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex lock 
kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex console_owner_lock irq_context: 0 cb_lock genl_mutex console_owner irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 &data->open_mutex uevent_sock_mutex stock_lock irq_context: 0 &data->open_mutex uevent_sock_mutex key irq_context: 0 &data->open_mutex uevent_sock_mutex pcpu_lock irq_context: 0 &data->open_mutex uevent_sock_mutex percpu_counters_lock irq_context: 0 &data->open_mutex uevent_sock_mutex pcpu_lock stock_lock irq_context: 0 &data->open_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 lock kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 bus_type_sem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 sysfs_symlink_target_lock 
irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &dev->power.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_owner_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 dpm_list_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 req_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &x->wait#11 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex stock_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex pool_lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock stock_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock key irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock pcpu_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock percpu_counters_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock pcpu_lock stock_lock 
irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem fw_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1082 irq_context: 0 rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 rcu_read_lock_bh &c->lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 kn->active#48 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#48 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_tx_wq#3 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock key irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock percpu_counters_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&tsk->oom_reaper_timer) irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 &obj_hash[i].lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &base->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#670 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1133 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &base->lock irq_context: 0 rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex device_links_srcu &rq->__lock irq_context: 0 &ep->mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 &ep->mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &ep->mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 
misc_mtx cpu_hotplug_lock wq_pool_mutex &base->lock &obj_hash[i].lock irq_context: 0 bt_proto_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 bt_proto_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pcpu_lock irq_context: 0 bt_proto_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#14 irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#8 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex key irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock nl_table_wait.lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#8 &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#14 irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: softirq rcu_read_lock &br->hash_lock nl_table_wait.lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#4 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#22 &____s->seqcount#2 irq_context: 0 kn->active#22 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex pool_lock#2 irq_context: 0 rtnl_mutex &pnettable->lock &rq->__lock irq_context: 0 sb_writers#3 fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: softirq rcu_read_lock &br->multicast_lock nl_table_wait.lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu percpu_counters_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock kfence_freelist_lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_lock_key#23 bit_wait_table + i irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ids->rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 bt_proto_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 bt_proto_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)nfc2_nci_rx_wq#622 irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_node_0 irq_context: 0 &hdev->req_lock pool_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex key irq_context: 0 cb_lock genl_mutex rtnl_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 sb_writers#3 &mm->mmap_lock &folio_wait_table[i] irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_tx_wq#621 irq_context: 0 sb_writers#3 stock_lock irq_context: 0 sb_writers#3 pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)nfc5_nci_rx_wq#13 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex net_rwsem rcu_node_0 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#3 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#563 irq_context: 0 &mm->mmap_lock pool_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex irq_context: 0 &net->xfrm.xfrm_cfg_mutex fs_reclaim irq_context: 0 &net->xfrm.xfrm_cfg_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &net->xfrm.xfrm_cfg_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex &obj_hash[i].lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &net->xfrm.xfrm_policy_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex rlock-AF_KEY irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#14 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#4 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#563 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rlock-AF_KEY irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &c->lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#297 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) kfence_freelist_lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#4 irq_context: 0 (wq_completion)wg-kex-wg0#303 irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 
&sb->s_type->i_mutex_key#9 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex device_links_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex device_links_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock quarantine_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex percpu_counters_lock irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock &base->lock irq_context: softirq rcu_callback &x->wait#2 &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &ctx->uring_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) &rq->__lock irq_context: 0 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER rcu_node_0 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rcu_state.expedited_wq irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &base->lock &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#21/1 &xa->xa_lock#5 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#284 irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &____s->seqcount#2 irq_context: 0 &sighand->siglock &base->lock irq_context: 0 &sighand->siglock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work &base->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work &base->lock &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) 
+ 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) kfence_freelist_lock irq_context: 0 &mapping->i_mmap_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &vmpr->sr_lock irq_context: 0 (wq_completion)events (work_completion)(&vmpr->work) irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock key irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &rq->__lock irq_context: 0 
kn->active#21 &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#23 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: softirq rcu_read_lock &br->hash_lock quarantine_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 namespace_sem remove_cache_srcu irq_context: 0 namespace_sem remove_cache_srcu quarantine_lock irq_context: 0 namespace_sem remove_cache_srcu &c->lock irq_context: 0 namespace_sem remove_cache_srcu &n->list_lock irq_context: 0 namespace_sem remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 namespace_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 namespace_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 tomoyo_ss remove_cache_srcu irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &n->list_lock &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#10 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#10 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#10 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 irq_context: 0 (wq_completion)wg-kex-wg0#303 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &p->lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh 
batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &hdev->req_lock &hdev->lock quarantine_lock irq_context: 0 &hdev->lock remove_cache_srcu irq_context: 0 rtnl_mutex _xmit_ETHER batched_entropy_u8.lock irq_context: 0 rtnl_mutex _xmit_ETHER kfence_freelist_lock irq_context: 0 rtnl_mutex &nft_net->commit_mutex &rq->__lock irq_context: 0 rtnl_mutex &nft_net->commit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &hdev->lock remove_cache_srcu quarantine_lock irq_context: 0 &hdev->lock remove_cache_srcu &c->lock irq_context: 0 &hdev->lock remove_cache_srcu &n->list_lock irq_context: 0 &hdev->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &hdev->lock remove_cache_srcu &rq->__lock irq_context: 0 &hdev->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex (inetaddr_chain).rwsem (inetaddr_chain).rwsem.wait_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem.wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &ep->mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock quarantine_lock irq_context: 0 sb_writers#10 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (inetaddr_chain).rwsem (inetaddr_chain).rwsem.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (inetaddr_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#10 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#10 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem quarantine_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 key irq_context: 0 sb_writers#3 pcpu_lock irq_context: 0 sb_writers#3 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 lock#4 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock kfence_freelist_lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 lock#5 irq_context: 0 sb_writers#3 &lruvec->lru_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &obj_hash[i].lock 
pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 rtnl_mutex &br->hash_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex quarantine_lock irq_context: 0 &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &meta->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void 
*__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 kn->active#52 remove_cache_srcu irq_context: 0 kn->active#52 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#52 remove_cache_srcu &c->lock irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#8 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr 
+ (((__per_cpu_offset[(cpu)])))); }); })->work) kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex.wait_lock irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#23 remove_cache_srcu irq_context: 0 kn->active#23 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#23 remove_cache_srcu &c->lock irq_context: 0 kn->active#23 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 cb_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock sb_writers#3 kfence_freelist_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &meta->lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &net->cells_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock quarantine_lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu quarantine_lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu &c->lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu &n->list_lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_PACKET remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock irq_context: 0 nfnl_subsys_cthelper rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_cthelper &rcu_state.expedited_wq irq_context: 0 nfnl_subsys_cthelper &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfnl_subsys_cthelper &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_cthelper &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem key#3 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &obj_hash[i].lock irq_context: 0 k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 kn->active#52 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#52 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 cb_lock rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock rcu_read_lock &sighand->siglock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock &sighand->siglock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &ctrl->lock#2 
irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex pool_lock#2 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex stock_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_prealloc_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_list_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#297 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex remove_cache_srcu stock_lock irq_context: 0 
rtnl_mutex remove_cache_srcu key irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX key irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX pcpu_lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 key irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pcpu_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &pipe->mutex/1 key irq_context: 0 &pipe->mutex/1 pcpu_lock irq_context: 0 &pipe->mutex/1 percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events fqdir_free_work quarantine_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &rq->__lock 
&obj_hash[i].lock irq_context: 0 &mm->mmap_lock &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex kfence_freelist_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER fs_reclaim irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] once_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] once_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tn->lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &tn->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tn->lock pool_lock#2 irq_context: 0 &f->f_pos_lock &p->lock &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 vlan_ioctl_mutex rtnl_mutex &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 rtnl_mutex &br->lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 key#29 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex bus_type_sem &rq->__lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock 
&rcu_state.expedited_wq irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#297 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &lo->lo_mutex &bdev->bd_holder_lock &rq->__lock irq_context: 0 &lo->lo_mutex &bdev->bd_holder_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET &rnp->exp_wq[3] irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 misc_mtx console_lock console_srcu console_owner_lock irq_context: 0 misc_mtx console_lock console_srcu console_owner irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 misc_mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &nbd->config_lock key irq_context: 0 &nbd->config_lock pcpu_lock irq_context: 0 &nbd->config_lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &nbd->config_lock percpu_counters_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_NFC/1 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &nf_conntrack_locks[i] rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &meta->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_IEEE802154 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#297 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NFC/1 slock-AF_NFC irq_context: 0 &xa->xa_lock &c->lock irq_context: 0 &xa->xa_lock &n->list_lock irq_context: 0 &xa->xa_lock &n->list_lock &c->lock irq_context: 0 &xa->xa_lock &____s->seqcount irq_context: 0 &xa->xa_lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &xa->xa_lock#23 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &list->lock#22 irq_context: 0 (wq_completion)netns net_cleanup_work per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 
(wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 cb_lock genl_mutex &meta->lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) rlock-AF_NETLINK irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned 
long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) &dir->lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 sk_lock-AF_INET6 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#285 irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 sk_lock-AF_PPPOX irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_PPPOX slock-AF_PPPOX irq_context: 0 lock prog_idr_lock irq_context: 0 lock prog_idr_lock pool_lock#2 irq_context: 0 bpf_lock irq_context: 0 prog_idr_lock &obj_hash[i].lock irq_context: 0 prog_idr_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET6 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink &lock->wait_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &pcp->lock &zone->lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 percpu_counters_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) put_task_map-wait-type-override#3 stock_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock prog_idr_lock &c->lock irq_context: 0 sb_writers#4 
&sb->s_type->i_mutex_key#9 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 lock prog_idr_lock &n->list_lock irq_context: 0 lock prog_idr_lock &n->list_lock &c->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle bit_wait_table + i irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: softirq (&dsp_spl_tl) dsp_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&dsp_spl_tl) dsp_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&dsp_spl_tl) dsp_lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&dsp_spl_tl) dsp_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&dsp_spl_tl) dsp_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 
(work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 &__ctx->lock irq_context: 0 &xa->xa_lock#23 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&({ do { const void *__vpp_verify = (typeof((&memcg_stock) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&memcg_stock))) *)((&memcg_stock))); (typeof((typeof(*((&memcg_stock))) *)((&memcg_stock)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &n->list_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex &nbd->config_lock &base->lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#300 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) 
&hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 &nbd->config_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock fs_reclaim key irq_context: 0 &mm->mmap_lock fs_reclaim pcpu_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &cfs_rq->removed.lock irq_context: 0 &ei->i_data_sem &mapping->i_private_lock irq_context: 0 &group->mark_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_internal jbd2_handle key#4 irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 sk_lock-AF_INET rcu_read_lock 
rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 &ids->rwsem &rcu_state.expedited_wq irq_context: 0 &ids->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem batched_entropy_u8.lock crngs.lock irq_context: 0 rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem stock_lock irq_context: 0 sk_lock-AF_PACKET &rnp->exp_wq[2] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem key irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 sk_lock-AF_PACKET &rnp->exp_wq[0] irq_context: 0 
(wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX slock-AF_PPPOX irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &pn->hash_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX clock-AF_PPPOX irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_PPPOX irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex pool_lock#2 irq_context: 0 sk_lock-AF_PACKET &rnp->exp_wq[1] irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &base->lock irq_context: 0 &pipe->mutex/1 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem key irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem percpu_counters_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[2] irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &c->lock irq_context: 0 
(wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &n->list_lock irq_context: 0 rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_ib_devices_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_ib_devices_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rds_ib_devices_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock batched_entropy_u8.lock crngs.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 cb_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 cb_lock rcu_read_lock kfence_freelist_lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle rcu_node_0 irq_context: 0 sb_internal jbd2_handle &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_wq[0] irq_context: 0 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle 
&n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 misc_mtx remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#311 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &base->lock irq_context: 0 misc_mtx 
nfc_devlist_mutex subsys mutex#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &k->k_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (pm_chain_head).rwsem (pm_chain_head).rwsem.wait_lock irq_context: 0 (pm_chain_head).rwsem &rq->__lock irq_context: 0 (pm_chain_head).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem.wait_lock irq_context: 0 misc_mtx system_transition_mutex &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nl_table_lock &meta->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &hdev->req_lock 
&hdev->lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 pernet_ops_rwsem nl_table_lock kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock key irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ebt_mutex &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &base->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle batched_entropy_u8.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle kfence_freelist_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &meta->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) kfence_freelist_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 subsys mutex#77 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 subsys mutex#77 &k->k_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex (console_sem).lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 
(wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex hcd_urb_list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount 
irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 kn->active#47 &____s->seqcount#2 irq_context: 0 &nbd->config_lock remove_cache_srcu &____s->seqcount irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &sig->cred_guard_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &sig->cred_guard_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex 
dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex req_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &x->wait#11 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 &simple_offset_xa_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 &simple_offset_xa_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 &simple_offset_xa_lock irq_context: 0 &p->lock &of->mutex kn->active#8 &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex subsys mutex#77 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
&dev->mutex subsys mutex#77 &k->k_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex subsys mutex#77 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &x->wait#9 irq_context: 0 &hdev->req_lock &hdev->lock pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex dpm_list_mtx irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex kernfs_idr_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex gdp_mutex &k->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex irq_context: 0 
&f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &usblp->mut irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &usblp->mut &usblp->wwait irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &usblp->mut &usblp->rwait irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &usblp->mut &anchor->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &anchor->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex (console_sem).lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex usblp_mutex pool_lock#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex klist_remove_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex kn->active#11 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock rcu_node_0 irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock &rcu_state.expedited_wq irq_context: 0 
nf_nat_proto_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#4 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 misc_mtx system_transition_mutex &n->list_lock irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 misc_mtx system_transition_mutex &n->list_lock &c->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock &rq->__lock irq_context: 0 (wq_completion)events 
(work_completion)(&aux->work) rtnl_mutex rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 &xt[i].mutex rcu_read_lock key irq_context: 0 &xt[i].mutex rcu_read_lock pcpu_lock irq_context: 0 &xt[i].mutex rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 
(wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex stock_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex key irq_context: 0 rtnl_mutex pcpu_alloc_mutex percpu_counters_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 &sighand->siglock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#311 &rq->__lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss 
mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &p->lock binder_dead_nodes_lock irq_context: 0 &f->f_pos_lock &p->lock binder_procs_lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#4 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &rq->__lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fill_pool_map-wait-type-override rcu_node_0 irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock quarantine_lock irq_context: 0 uevent_sock_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_VSOCK &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK &meta->lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK kfence_freelist_lock irq_context: 0 &disk->open_mutex remove_cache_srcu irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &disk->open_mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem thermal_list_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem thermal_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex remove_cache_srcu &c->lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) pool_lock#2 irq_context: 0 &disk->open_mutex remove_cache_srcu &rq->__lock irq_context: 0 &disk->open_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &nbd->config_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex 
&nbd->config_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 &mm->mmap_lock &xa->xa_lock#23 &ctrl->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc11_nci_rx_wq#3 &rq->__lock irq_context: 0 &sig->cred_guard_mutex fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#9 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &lo->lo_mutex batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock irq_context: 0 &lo->lo_mutex kfence_freelist_lock irq_context: 0 &vma->vm_lock->lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem percpu_counters_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 
(wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#568 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#570 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#570 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers &type->i_mutex_dir_key#2 batched_entropy_u32.lock crngs.lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bond0#170 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#311 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx 
fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#4 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 kn->active#23 &rq->__lock irq_context: 0 kn->active#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->interleave_scan)->work) &rq->__lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 fill_pool_map-wait-type-override &n->list_lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock memcg_oom_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->interleave_scan)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 &ctx->wqh &alarm_bases[i].lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock &xa->xa_lock#9 irq_context: 0 &mm->mmap_lock &sb->s_type->i_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 &disk->open_mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); 
({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock &n->list_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 stack_depot_init_mutex &rq->__lock irq_context: 0 stack_depot_init_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &info->lock irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sk_lock-AF_UNIX fs_reclaim irq_context: 0 sk_lock-AF_UNIX fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#300 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &mapping->i_private_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 rtnl_mutex remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_UNIX fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#4 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 stock_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 key irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pcpu_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 percpu_counters_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pcpu_lock stock_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 cb_lock 
genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock kernfs_rename_lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock rcu_read_lock (console_sem).lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock rcu_read_lock kernfs_pr_cont_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle stock_lock irq_context: 0 sb_writers#3 jbd2_handle key irq_context: 0 sb_writers#3 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 jbd2_handle pcpu_lock stock_lock irq_context: 0 kn->active#23 &____s->seqcount#2 irq_context: 0 &ep->mtx stock_lock irq_context: 0 kn->active#23 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock rcu_node_0 irq_context: 0 &ep->mtx key irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#4 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx pcpu_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC fill_pool_map-wait-type-override pool_lock#2 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx percpu_counters_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#147 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#21 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 &ep->mtx pcpu_lock stock_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sqd->lock &obj_hash[i].lock pool_lock irq_context: 0 &nbd->config_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock rcu_read_lock kernfs_pr_cont_lock kernfs_rename_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock rcu_read_lock 
kernfs_pr_cont_lock (console_sem).lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#5 rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#5 &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock sb_writers#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[3] irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 &pipe->mutex/1 rcu_read_lock &rcu_state.gp_wq 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_UNIX stock_lock irq_context: 0 &nbd->config_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_NETLINK stock_lock irq_context: 0 sk_lock-AF_NETLINK &f->f_lock irq_context: 0 sk_lock-AF_NETLINK &f->f_lock fasync_lock irq_context: 0 sk_lock-AF_NETLINK &f->f_lock fasync_lock &new->fa_lock irq_context: 0 rtnl_mutex stack_depot_init_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK &f->f_lock fasync_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pcpu_lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_UNIX &f->f_lock irq_context: 0 sk_lock-AF_UNIX &f->f_lock fasync_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &new->fa_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &new->fa_lock &f->f_owner.lock irq_context: 0 sk_lock-AF_UNIX &f->f_lock fasync_lock &new->fa_lock irq_context: 0 sk_lock-AF_UNIX &f->f_lock fasync_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)nfc3_nci_tx_wq#303 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 sb_writers#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 rtnl_mutex &pnettable->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sb_writers#7 rcu_node_0 irq_context: 0 sb_writers#7 &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#7 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#292 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock quarantine_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu stock_lock irq_context: 0 &dev->mutex uevent_sock_mutex 
rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ctx->uring_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu pcpu_lock irq_context: 0 &xa->xa_lock#20 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xa->xa_lock#20 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_owner.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 &type->i_mutex_dir_key#4 &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#4 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rcu_state.expedited_wq irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock hcd->address0_mutex &queue->lock semaphore->lock#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#5 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 nfc_devlist_mutex kn->active#4 irq_context: 0 nfc_devlist_mutex kn->active#4 &root->deactivate_waitq irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem 
freezer_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_NONE#2 irq_context: 0 rtnl_mutex &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 rtnl_mutex &wg->device_update_lock &base->lock irq_context: 0 rtnl_mutex &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 rtnl_mutex &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 rtnl_mutex &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 rtnl_mutex &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 rtnl_mutex &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &meta->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock kfence_freelist_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &handshake->lock &table->lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 pool_lock#2 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 
sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock pool_lock#2 irq_context: 0 rtnl_mutex &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem stock_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem key irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr 
= (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 
0 (wq_completion)wg-kex-wg0#304 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
sb_internal remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 nfnl_subsys_ctnetlink_exp nf_conntrack_expect_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock pool_lock#2 irq_context: 0 kn->active#20 &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 cb_lock genl_mutex bus_type_sem &rq->__lock irq_context: 0 cb_lock genl_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#49 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#4 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tn->lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_rx_wq#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#4 irq_context: 0 rtnl_mutex &wg->socket_update_lock irq_context: 0 rtnl_mutex &table->hash[i].lock irq_context: 0 rtnl_mutex &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex k-clock-AF_INET irq_context: 0 rtnl_mutex k-clock-AF_INET6 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fib_info_lock &obj_hash[i].lock irq_context: 0 
rtnl_mutex (inetaddr_chain).rwsem fib_info_lock pool_lock#2 irq_context: 0 cb_lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 cb_lock fs_reclaim &obj_hash[i].lock irq_context: 0 cb_lock fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 key irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 (wq_completion)events (linkwatch_work).work pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#150 
(work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&hcd->wakeup_work) &dev->mutex &hub->status_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 br_ioctl_mutex rtnl_mutex irq_context: 0 br_ioctl_mutex rtnl_mutex &mm->mmap_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#3 irq_context: 0 br_ioctl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &xt[i].mutex remove_cache_srcu stock_lock irq_context: 0 &xt[i].mutex remove_cache_srcu key irq_context: 0 &vma->vm_lock->lock fs_reclaim &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &lock->wait_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &lock->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#3 &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock stock_lock irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 tomoyo_ss 
remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 &tsk->futex_exit_mutex &obj_hash[i].lock pool_lock irq_context: 0 kn->active#46 &____s->seqcount#2 irq_context: 0 kn->active#46 &____s->seqcount irq_context: 0 kn->active#46 &n->list_lock irq_context: 0 kn->active#46 &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#5 irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rq->__lock cpu_asid_lock irq_context: 0 &q->mq_freeze_lock &rq->__lock irq_context: 0 &q->mq_freeze_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sqd->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#5 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#3 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &base->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle 
&ei->i_data_sem mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock kfence_freelist_lock irq_context: 0 &xt[i].mutex remove_cache_srcu pcpu_lock irq_context: 0 br_ioctl_mutex rtnl_mutex fs_reclaim irq_context: 0 br_ioctl_mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &group->mark_mutex key irq_context: 0 &group->mark_mutex pcpu_lock irq_context: 0 &group->mark_mutex percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 br_ioctl_mutex rtnl_mutex stock_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &c->lock irq_context: 0 br_ioctl_mutex rtnl_mutex pool_lock#2 irq_context: 0 &xt[i].mutex remove_cache_srcu percpu_counters_lock irq_context: 0 &xt[i].mutex remove_cache_srcu pcpu_lock stock_lock irq_context: 0 br_ioctl_mutex rtnl_mutex stack_depot_init_mutex irq_context: 0 br_ioctl_mutex rtnl_mutex crngs.lock irq_context: 0 br_ioctl_mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 br_ioctl_mutex rtnl_mutex krc.lock irq_context: 0 br_ioctl_mutex rtnl_mutex &dir->lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex rtnl_mutex &n->list_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &n->list_lock &c->lock irq_context: 0 br_ioctl_mutex rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 &lo->lo_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 &lo->lo_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const 
void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc22_nci_rx_wq#3 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#3 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#5 quarantine_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &sb->s_type->i_mutex_key#9 
&sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock rlock-AF_PACKET irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#5 &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &lruvec->lru_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#5 irq_context: 0 cb_lock genl_mutex rcu_read_lock &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC genl_mutex irq_context: 0 
(wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock pool_lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#5 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rcu_read_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 
(wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#298 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 kn->active#49 remove_cache_srcu irq_context: 0 kn->active#49 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#49 remove_cache_srcu &c->lock irq_context: 0 kn->active#49 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#49 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock key irq_context: 0 sk_lock-AF_INET rcu_read_lock pcpu_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#4 irq_context: 0 sk_lock-AF_INET 
rcu_read_lock percpu_counters_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)nfc12_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#4 irq_context: 0 nfc_devlist_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#4 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 quarantine_lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#3 irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#4 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#580 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#4 irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#4 irq_context: 0 &ei->i_data_sem &bgl->locks[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq rcu_callback &dir->lock &obj_hash[i].lock irq_context: softirq rcu_callback &dir->lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#4 irq_context: 0 &p->lock &of->mutex kn->active#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->pndevs.lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->pndevs.lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#4 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex 
&tbl->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#7 irq_context: 0 &mm->mmap_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#5 irq_context: 0 &child->perf_event_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ctx->uring_lock percpu_ref_switch_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#5 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &base->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock key#23 irq_context: 0 &disk->open_mutex &nbd->config_lock &base->lock irq_context: 0 &disk->open_mutex &nbd->config_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#5 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 
0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_tx_wq#5 irq_context: 0 &data->open_mutex rcu_node_0 irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &mm->mmap_lock stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 rtnl_mutex &br->lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)nfc18_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#5 irq_context: 0 rtnl_mutex &br->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex &br->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#3 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#581 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex remove_cache_srcu pcpu_lock irq_context: 0 rtnl_mutex remove_cache_srcu percpu_counters_lock irq_context: 0 rtnl_mutex remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock kfence_freelist_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock key irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock pcpu_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock percpu_counters_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem ovs_mutex &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem leds_list_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &lo->lo_mutex remove_cache_srcu irq_context: 0 (wq_completion)nfc2_nci_tx_wq#580 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#584 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#584 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &dir->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#96 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#96 &rq->__lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rq->__lock irq_context: 0 &xt[i].mutex pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#98 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#98 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#587 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#97 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#96 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#593 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#593 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy340 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#601 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&w->w) rcu_node_0 irq_context: 0 &lo->lo_mutex remove_cache_srcu quarantine_lock irq_context: 0 &lo->lo_mutex remove_cache_srcu &c->lock irq_context: 0 &lo->lo_mutex remove_cache_srcu &n->list_lock irq_context: 0 &lo->lo_mutex remove_cache_srcu &rq->__lock irq_context: 0 &child->perf_event_mutex pool_lock#2 irq_context: 0 &lo->lo_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &lo->lo_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#601 &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#601 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#322 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#5 key 
irq_context: 0 &mm->mmap_lock sb_writers#5 pcpu_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &rnp->exp_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 kn->active#23 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#606 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#606 &rq->__lock irq_context: 0 cb_lock genl_mutex 
nbd_index_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex nbd_index_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex nbd_index_mutex rlock-AF_NETLINK irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#606 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1100 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#611 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#611 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#612 irq_context: 0 &nbd->config_lock &cfs_rq->removed.lock irq_context: 0 &nbd->config_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) pool_lock#2 irq_context: 0 cb_lock genl_mutex nbd_index_mutex &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#613 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#328 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#613 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#613 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#614 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xdp.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->xdp.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &xs->map_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &xs->mutex irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_XDP irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &sem->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#623 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#623 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#623 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#616 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#616 &rq->__lock irq_context: 0 kn->active#20 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#104 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#330 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#618 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#621 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#622 irq_context: 0 (wq_completion)netns net_cleanup_work ucounts_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg2#151 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#305 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: softirq (&peer->timer_new_handshake) irq_context: softirq (&peer->timer_new_handshake) &peer->endpoint_lock irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock 
&pool->lock irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#5 percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock &meta->lock irq_context: 0 &xt[i].mutex free_vmap_area_lock kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex &fn->fou_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock key irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock pcpu_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 nfc_devlist_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#304 irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &lo->lo_mutex remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sk_lock-AF_NFC/1 &rq->__lock irq_context: 0 sb_writers#3 sb_internal 
mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex nbd_index_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex nbd_index_mutex &____s->seqcount irq_context: 0 &child->perf_event_mutex &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#316 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#633 irq_context: 0 misc_mtx &dev->mutex stock_lock irq_context: 0 misc_mtx &dev->mutex key irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#633 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#633 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#634 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#635 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#635 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#640 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#641 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: 0 misc_mtx &dev->mutex pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#3 irq_context: 0 &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 misc_mtx &dev->mutex percpu_counters_lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-crypt-wg1#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#384 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#4 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 sb_writers#10 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#10 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#385 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#385 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#385 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#501 irq_context: 0 rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#176 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#15 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem 
&sem->wait_lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &disk->open_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex rcu_read_lock &rq->__lock irq_context: 0 &disk->open_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rnp->exp_wq[1] irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex 
&helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock set->srcu &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 ebt_mutex &mm->mmap_lock key irq_context: 0 ebt_mutex &mm->mmap_lock pcpu_lock irq_context: 0 ebt_mutex &mm->mmap_lock percpu_counters_lock irq_context: 0 ebt_mutex &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#386 irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#389 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex 
&client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: softirq (&app->join_timer)#2 &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&app->join_timer)#2 &base->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&app->join_timer)#2 &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &c->lock irq_context: 0 &sig->cred_guard_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sig->cred_guard_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &sig->cred_guard_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock sk_lock-AF_INET6 irq_context: 0 &disk->open_mutex &lo->lo_mutex &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock sk_lock-AF_INET6 slock-AF_INET6 irq_context: 0 &disk->open_mutex &nbd->config_lock &nsock->tx_lock slock-AF_INET6 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex dev_hotplug_mutex irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#390 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#385 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock nl_table_lock irq_context: 0 rtnl_mutex &br->lock nl_table_wait.lock irq_context: 0 rtnl_mutex &br->lock &br->multicast_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#5 irq_context: 0 (wq_completion)hci0#4 irq_context: 0 sk_lock-AF_NFC/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex 
&dev->mutex minor_rwsem#2 uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#178 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#42 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &meta->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu kfence_freelist_lock irq_context: 0 &ctx->cancel_lock cancel_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->lock &c->lock irq_context: 0 &dev->lock &n->list_lock irq_context: 0 &dev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#178 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 misc_mtx &dev->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 gdp_mutex &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex minor_rwsem#2 lock kernfs_idr_lock pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &obj_hash[i].lock irq_context: 0 &disk->open_mutex &lo->lo_mutex pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#5 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex key irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rnp->exp_wq[0] irq_context: 0 (wq_completion)wg-kex-wg0#303 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#843 irq_context: 0 uuid_mutex fs_reclaim irq_context: 0 uuid_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 uuid_mutex &c->lock irq_context: 0 uuid_mutex pool_lock#2 irq_context: 0 uuid_mutex rcu_read_lock &____s->seqcount#3 irq_context: 0 uuid_mutex rcu_read_lock &dentry->d_lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock kfence_freelist_lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &po->bind_lock irq_context: 0 fanout_mutex irq_context: 0 clock-AF_PACKET irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 uuid_mutex &type->i_mutex_dir_key#3 &dentry->d_lock &wq irq_context: 0 uuid_mutex rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 uuid_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#6 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sighand->siglock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)nfc23_nci_rx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#642 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 tomoyo_ss pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 system_transition_mutex &____s->seqcount irq_context: 0 system_transition_mutex &mm->mmap_lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
&ei->i_data_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#863 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#501 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#862 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex pcpu_alloc_mutex 
irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &fsnotify_mark_srcu &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex proc_subdir_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock _xmit_ETHER batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &idev->mc_lock _xmit_ETHER kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &vn->sock_lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)nfc23_nci_tx_wq#5 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#6 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#648 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#648 &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &crtc->commit_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock pool_lock irq_context: 0 (wq_completion)nfc22_nci_rx_wq#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#4 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock 
irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#3 irq_context: 0 tomoyo_ss remove_cache_srcu stock_lock irq_context: 0 tomoyo_ss remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#3 irq_context: 0 kn->active#21 fs_reclaim &rq->__lock irq_context: 0 kn->active#21 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 pcpu_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_tx_wq#384 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc24_nci_rx_wq#3 &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc24_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_tx_wq#3 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#30 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &x->wait#15 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#5 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &cfs_rq->removed.lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &base->lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount#2 irq_context: 0 
&type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &n->list_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &br->lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#862 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#5 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
jbd2_handle &ei->i_data_sem rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex quarantine_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &meta->lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 remove_cache_srcu &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex 
&helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex kfence_freelist_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &____s->seqcount#2 irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &____s->seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock irq_context: 0 wq_pool_attach_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 wq_pool_attach_mutex &cfs_rq->removed.lock irq_context: 0 wq_pool_attach_mutex &obj_hash[i].lock irq_context: 0 wq_pool_attach_mutex pool_lock#2 irq_context: 0 wq_pool_attach_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (&timer.timer) irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex 
(work_completion)(&vkms_state->composer_work)#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &____s->seqcount irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex crypto_alg_sem irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#4 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &sem->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)nfc27_nci_rx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#656 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_tx_wq#6 irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#3 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#3 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &c->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (kmod_concurrent_max).lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: softirq (&ndev->rs_timer) 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 uuid_mutex &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &p->lock kfence_freelist_lock irq_context: 0 uuid_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NFC &mm->mmap_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 loop_validate_mutex &lo->lo_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex fs_reclaim irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_VSOCK &cfs_rq->removed.lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) kfence_freelist_lock irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) &meta->lock irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu &c->lock irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu &n->list_lock irq_context: 0 sk_lock-AF_VSOCK remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &sbi->s_writepages_rwsem quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 kn->active#4 &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_node_0 irq_context: 0 cb_lock genl_mutex &nbd->config_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &meta->lock irq_context: 0 
sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &obj_hash[i].lock irq_context: softirq rcu_callback stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_tx_wq#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#661 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#661 irq_context: 0 &dev->master_mutex &lock->wait_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock kfence_freelist_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 key irq_context: 0 &sb->s_type->i_mutex_key#10 
sk_lock-AF_INET6 pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#3 irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_new_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu percpu_counters_lock irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &x->wait#16 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#3 irq_context: 0 &mm->mmap_lock rcu_read_lock pool_lock irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#3 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_TIPC fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &list->lock#14 &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock &list->lock#14 pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex running_helpers_waitq.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC pcpu_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC percpu_counters_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC pcpu_lock stock_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex crypto_alg_sem irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem fs_reclaim irq_context: 0 (wq_completion)vsock-loopback (work_completion)(&vsock->pkt_work) sk_lock-AF_VSOCK quarantine_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cgroup_threadgroup_rwsem (console_sem).lock irq_context: 0 cgroup_threadgroup_rwsem console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem console_lock console_srcu console_owner irq_context: 0 cgroup_threadgroup_rwsem console_lock console_srcu console_owner &port_lock_key irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock kernfs_notify_lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &c->lock irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#5 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem kthread_create_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &p->pi_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &p->pi_lock &rq->__lock 
irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &x->wait irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem key irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &obj_hash[i].lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem pcpu_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem console_lock console_srcu console_owner console_owner_lock irq_context: 0 cgroup_threadgroup_rwsem kernfs_pr_cont_lock irq_context: 0 cgroup_threadgroup_rwsem kernfs_pr_cont_lock kernfs_rename_lock irq_context: 0 cgroup_threadgroup_rwsem kernfs_pr_cont_lock (console_sem).lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock &base->lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_file_kn_lock &base->lock &obj_hash[i].lock irq_context: softirq (&cfile->notify_timer) irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 cb_lock genl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock &obj_hash[i].lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock &base->lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock &base->lock &obj_hash[i].lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock 
kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&cfile->notify_timer) cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#5 irq_context: 0 sb_writers#4 key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &x->wait#2 irq_context: 0 sb_writers#4 pcpu_lock irq_context: 0 sb_writers#4 percpu_counters_lock irq_context: 0 sb_writers#4 &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#3 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &x->wait#20 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#3 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &base->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &base->lock &obj_hash[i].lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) quarantine_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &ids->rwsem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex gdp_mutex &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key/1 &c->lock irq_context: 0 &type->i_mutex_dir_key/1 &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) 
rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &meta->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (&timer.timer) irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &n->list_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &meta->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &n->list_lock &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock crngs.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#3 irq_context: 0 sb_writers#3 &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex batched_entropy_u8.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex kfence_freelist_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &meta->lock irq_context: 0 (wq_completion)nfc29_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#384 irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-kex-wg2#296 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#666 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#666 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#666 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#366 irq_context: 
softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu stock_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu key irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu percpu_counters_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#669 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#669 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#593 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &pcp->lock &zone->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &____s->seqcount irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &n->list_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &n->list_lock &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem quarantine_lock irq_context: 0 (crypto_chain).rwsem &n->list_lock irq_context: 0 (crypto_chain).rwsem &n->list_lock &c->lock irq_context: 0 pfkey_mutex pfkey_mutex.wait_lock irq_context: 0 &x->wait#28 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) &sqd->lock &lock->wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &c->lock irq_context: 0 &x->wait#30 &p->pi_lock irq_context: 0 &x->wait#30 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#30 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &____s->seqcount#2 irq_context: 0 (crypto_chain).rwsem &____s->seqcount#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex key irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex pcpu_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex percpu_counters_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &pcp->lock &zone->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 cb_lock rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#384 irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override stock_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#383 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#177 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#177 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#174 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#175 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#172 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#173 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#3 irq_context: 0 &dev->clientlist_mutex &helper->lock &lock->wait_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#3 irq_context: 0 
(wq_completion)nfc6_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#175 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#175 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#175 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#175 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#175 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#176 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#179 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override key irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex pcpu_lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock stock_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pcpu_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override percpu_counters_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &____s->seqcount irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex &p->pi_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &p->pi_lock &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pfkey_mutex.wait_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#179 irq_context: 0 &x->wait#20 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &rq->__lock 
irq_context: 0 (crypto_chain).rwsem quarantine_lock irq_context: 0 bpf_devs_lock &n->list_lock irq_context: 0 bpf_devs_lock &n->list_lock &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 pfkey_mutex.wait_lock irq_context: 0 pfkey_mutex &cfs_rq->removed.lock irq_context: 0 pfkey_mutex &obj_hash[i].lock irq_context: 0 pfkey_mutex pool_lock#2 irq_context: 0 (crypto_chain).rwsem remove_cache_srcu irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 system_transition_mutex device_hotplug_lock &mm->mmap_lock irq_context: 0 (crypto_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#177 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &port_dev->status_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (crypto_chain).rwsem remove_cache_srcu pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rcu_state.expedited_wq irq_context: 0 &dev->master_mutex batched_entropy_u8.lock irq_context: 0 
cb_lock genl_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex kfence_freelist_lock irq_context: 0 &dev->master_mutex &meta->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &____s->seqcount irq_context: 0 crypto_alg_sem &rq->__lock irq_context: 0 crypto_alg_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &dev->master_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#40 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#40 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock 
&dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#40 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &fs->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &fs->lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &dentry->d_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#39 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#5 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)mld &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex percpu_counters_lock irq_context: 0 misc_mtx nfc_devlist_mutex pcpu_lock stock_lock irq_context: 0 (crypto_chain).rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &meta->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 
(wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#315 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 irq_context: 0 (wq_completion)wg-crypt-wg0#154 irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void 
*)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock key irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#5 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pid->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#304 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 kn->active#52 &____s->seqcount#2 irq_context: 0 kn->active#52 &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock 
rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events_unbound (work_completion)(&ctx->exit_work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock 
hci_sk_list.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &n->list_lock &c->lock irq_context: 0 rtnl_mutex &br->lock &br->multicast_lock &base->lock irq_context: 0 rtnl_mutex &br->lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &n->list_lock irq_context: 0 rtnl_mutex &br->lock &n->list_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->hash_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 
(work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &bond->stats_lock/1 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#2 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#386 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#386 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire rcu_node_0 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#180 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#180 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#177 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#16 irq_context: 0 &hdev->req_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#178 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#41 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#40 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#4 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#16 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#2 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu &c->lock 
irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)events 
(work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#316 irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &obj_hash[i].lock pool_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex crypto_alg_sem &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex 
crypto_alg_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu quarantine_lock irq_context: 0 (work_completion)(&local->timeout_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&handshake->lock remove_cache_srcu &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &n->list_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1131 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1131 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1131 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (crypto_chain).rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &x->wait#20 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock 
irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rcu_state.expedited_wq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 kn->active#23 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#23 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &base->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#863 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#503 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#504 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock &cache->free_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &x->wait#2 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 lock map_idr_lock &____s->seqcount#2 irq_context: 0 lock map_idr_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 lock map_idr_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#866 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#866 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#866 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock 
&hdev->lock device_links_lock irq_context: 0 system_transition_mutex device_hotplug_lock &mm->mmap_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&map->work) stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 rcu_node_0 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_wq[3] irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem quarantine_lock 
irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &base->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 system_transition_mutex device_hotplug_lock swap_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &hub->status_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sighand->siglock quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex pfkey_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &____s->seqcount irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#17 &rq->__lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 pfkey_mutex pfkey_mutex.wait_lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 
&root->kernfs_iattr_rwsem &rcu_state.expedited_wq irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem &rq->__lock cpu_asid_lock irq_context: 0 pfkey_mutex rcu_read_lock &rq->__lock irq_context: 0 pfkey_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &rnp->exp_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 pfkey_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#870 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#870 &rq->__lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#870 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#865 irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) krc.lock &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_node_0 irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex rcu_node_0 irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &of->mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#159 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (crypto_chain).rwsem remove_cache_srcu &____s->seqcount irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex 
rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[2] irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &rq->__lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 sk_lock-AF_UNIX &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->mutex &root->kernfs_rwsem &base->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_UNIX &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &pcp->lock &zone->lock irq_context: 0 &sqd->lock cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start 
irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#308 irq_context: 0 (wq_completion)wg-crypt-wg0#160 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#870 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#871 irq_context: softirq (&p->timer) &br->multicast_lock &____s->seqcount#2 irq_context: softirq (&p->timer) &br->multicast_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex rcu_node_0 irq_context: softirq rcu_callback batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#871 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#862 irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rcu_state.expedited_wq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#507 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex &nbd->config_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex &nbd->config_lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#503 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#501 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#873 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#874 irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 key irq_context: 
0 sb_writers#7 &of->mutex kn->active#4 pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 percpu_counters_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#158 irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock quarantine_lock irq_context: 0 cb_lock rtnl_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 
(wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock 
irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &base->lock irq_context: 0 sched_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#158 &rq->__lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM rcu_read_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx 
&sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &ctrl->lock#2 irq_context: 0 &mm->mmap_lock &rtpn->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#158 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#876 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#876 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#877 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#878 irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#512 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#512 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#163 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#163 &devlink_port->type_lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#159 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 
(wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->routes.lock &rq->__lock irq_context: 0 rlock-AF_INET6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 &hdev->req_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &hdev->req_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &hdev->req_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#163 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#513 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex key irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex percpu_counters_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex remove_cache_srcu irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 &hdev->req_lock 
&hdev->lock uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 system_transition_mutex console_lock console_srcu console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#882 irq_context: 0 (pm_chain_head).rwsem rcu_read_lock &rq->__lock irq_context: 0 (pm_chain_head).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events 
(work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#882 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#882 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#882 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#882 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#884 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#884 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#884 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 system_transition_mutex console_lock console_srcu console_owner irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 
cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (wq_completion)wg-crypt-wg0#150 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wq->mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wq->mutex &pool->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wq->mutex &x->wait#10 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock napi_hash_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock (wq_completion)wg-kex-wg0#295 irq_context: 0 system_transition_mutex console_owner_lock irq_context: 0 system_transition_mutex console_owner irq_context: 0 system_transition_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &table->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &list->lock#14 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &handshake->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &peer->keypairs.keypair_update_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#307 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#154 irq_context: 0 kn->active#51 &rq->__lock irq_context: 0 kn->active#51 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock key irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock pcpu_lock irq_context: 0 kn->active#47 &rq->__lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock percpu_counters_lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &net->xfrm.xfrm_cfg_mutex pfkey_mutex (crypto_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock pool_lock#2 
irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#149 
(work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &disk->open_mutex &nbd->config_lock &rcu_state.expedited_wq irq_context: 0 &disk->open_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 kn->active#4 rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 &root->deactivate_waitq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 
(wq_completion)wg-kex-wg2#305 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#146 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex key irq_context: 0 rtnl_mutex uevent_sock_mutex pcpu_lock irq_context: 0 rtnl_mutex uevent_sock_mutex percpu_counters_lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#305 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#305 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock percpu_charge_mutex rcu_read_lock &pool->lock irq_context: 0 &mm->mmap_lock percpu_charge_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_VSOCK rcu_read_lock 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex wq_pool_mutex &rq->__lock irq_context: 0 rtnl_mutex wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#885 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#888 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#889 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#889 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#890 irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#146 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#520 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#520 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#894 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#894 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#165 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#165 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#165 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#897 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#897 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#897 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#164 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#898 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#898 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#899 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#899 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#899 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#526 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#526 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#900 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#528 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#528 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#529 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#904 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#904 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#904 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#907 irq_context: 0 kn->active#22 remove_cache_srcu irq_context: 0 kn->active#22 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#22 remove_cache_srcu &c->lock irq_context: 0 kn->active#22 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#22 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#907 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#530 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#530 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#906 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#168 irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#911 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle 
&ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock percpu_charge_mutex stock_lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem lock#4 &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#533 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#529 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#914 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#915 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#915 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#917 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#170 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#917 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#537 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#920 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#921 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#538 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#538 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#924 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#924 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#539 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#925 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#924 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#924 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#540 irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#927 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#927 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#541 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#927 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#929 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#929 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#929 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#931 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#932 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#932 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#933 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#934 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#5 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx system_transition_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx system_transition_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#5 irq_context: 0 &f->f_pos_lock sb_writers#4 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#5 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &c->lock irq_context: 0 &f->f_pos_lock sb_writers#4 sysctl_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rcu_read_lock_bh _xmit_NONE#2 irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim irq_context: 0 rcu_read_lock_bh _xmit_NONE#2 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rcu_read_lock_bh _xmit_NONE#2 pool_lock#2 irq_context: 0 &f->f_pos_lock sb_writers#4 &mm->mmap_lock irq_context: 0 &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#5 irq_context: 0 &f->f_pos_lock sb_writers#4 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events 
(work_completion)(&mm->async_put_work) pool_lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#5 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &lru->node[i].lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#7 &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci5 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX chan_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &x->wait#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex device_links_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 quarantine_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#285 (work_completion)(&peer->transmit_handshake_work) 
&peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &disk->open_mutex &nbd->config_lock stock_lock irq_context: 0 &disk->open_mutex &nbd->config_lock key irq_context: 0 &disk->open_mutex &nbd->config_lock pcpu_lock irq_context: 0 &disk->open_mutex &nbd->config_lock percpu_counters_lock irq_context: 0 &disk->open_mutex &nbd->config_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 misc_mtx system_transition_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 nfnl_subsys_ctnetlink fs_reclaim irq_context: 0 nfnl_subsys_ctnetlink fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfnl_subsys_ctnetlink &c->lock irq_context: 0 nfnl_subsys_ctnetlink &n->list_lock irq_context: 0 nfnl_subsys_ctnetlink &n->list_lock &c->lock irq_context: 0 nfnl_subsys_ctnetlink pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 nfnl_subsys_ctnetlink rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 nfnl_subsys_ctnetlink rlock-AF_NETLINK irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (work_completion)(&local->sdreq_timeout_work) &rq->__lock irq_context: 0 (work_completion)(&local->sdreq_timeout_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#316 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#43 irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER stock_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER key irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rnp->exp_wq[2] irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER pcpu_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER percpu_counters_lock irq_context: 0 nfnl_subsys_ctnetlink nlk_cb_mutex-NETFILTER pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX pool_lock#2 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#43 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#43 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#393 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#43 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#395 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#396 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#396 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#397 irq_context: softirq rcu_read_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#398 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#399 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#401 irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#402 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#403 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#404 irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#404 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#191 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#405 irq_context: 0 &dev->mutex &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 cb_lock put_task_map-wait-type-override#5 &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock 
&base->lock &obj_hash[i].lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#405 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 &nbd->config_lock stock_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 &nbd->config_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#192 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#407 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#408 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#408 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#407 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#409 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#410 &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#196 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock 
irq_context: softirq rcu_read_lock hwsim_radio_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#197 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#195 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#416 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#416 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#416 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#416 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#416 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#415 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#200 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#200 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#196 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#418 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#51 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void 
*__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_owner irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#419 irq_context: 0 &f->f_pos_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock key irq_context: 0 &f->f_pos_lock &mm->mmap_lock pcpu_lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock percpu_counters_lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#202 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#202 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#198 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#198 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#198 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#198 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#421 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#421 &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#421 irq_context: 0 &nbd->config_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#421 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#420 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#420 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#420 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#53 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#53 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#52 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#51 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#51 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#51 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#422 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#422 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#422 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#421 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#423 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#423 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#423 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#422 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#203 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#203 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#203 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#203 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#199 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#199 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#54 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#54 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#53 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#52 irq_context: 0 &dev->mutex kn->active#4 &cfs_rq->removed.lock irq_context: 0 &dev->mutex kn->active#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &mm->mmap_lock kfence_freelist_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex kn->active#4 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->w) &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#21 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &base->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &____s->seqcount#2 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &____s->seqcount irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_rx_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#4 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock 
tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &nbd->config_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#20 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 
remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex kfence_freelist_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) fs_reclaim &rq->__lock irq_context: 0 sb_writers#4 &rcu_state.expedited_wq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) remove_cache_srcu irq_context: 0 rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex 
nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex 
fs_reclaim irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#424 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#425 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#425 &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci5#2 
(work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NFC &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 &mm->mmap_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock 
&obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &sb->s_type->i_lock_key#22 bit_wait_table + i irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci5#2 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#425 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#205 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#202 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#202 irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 &sqd->lock cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem key irq_context: 0 &dev->mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &f->f_pos_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#207 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#204 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#55 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#429 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#429 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#430 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#207 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#433 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#433 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#433 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#433 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#432 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#210 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#435 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#435 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#438 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#438 &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rnp->exp_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rnp->exp_wq[2] irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock 
rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#439 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#220 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#441 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rnp->exp_wq[1] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rnp->exp_wq[0] irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem stock_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem key irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem pcpu_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 pool_lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 &data->open_mutex rfkill_global_mutex dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex rfkill_global_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock 
&pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#163 irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#441 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#441 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#440 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#442 irq_context: softirq rcu_read_lock hwsim_radio_lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#442 &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#443 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#444 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#443 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#443 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#445 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#444 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#221 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#226 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 nfc_devlist_mutex gdp_mutex gdp_mutex.wait_lock irq_context: 0 nfc_devlist_mutex gdp_mutex &rq->__lock irq_context: 0 nfc_devlist_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#2 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#226 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#226 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#449 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#227 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#227 &rq->__lock irq_context: 0 slock-AF_PPPOX irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#60 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#59 irq_context: 0 (wq_completion)events (work_completion)(&nlk->work) quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#452 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#228 irq_context: 0 
&sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#453 irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#454 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#455 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#456 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#228 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#458 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#458 &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &base->lock 
&obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#458 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#460 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#233 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#233 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#229 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->xdp.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->xdp.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &____s->seqcount irq_context: 0 &disk->open_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &ndev->req_lock &wq->mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock &wq->mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock &wq->mutex pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#163 irq_context: 0 rtnl_mutex team->team_lock_key#163 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#163 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#163 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#163 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#163 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#163 &tn->lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 rtnl_mutex team->team_lock_key#163 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#163 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#163 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex team->team_lock_key#163 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#163 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#163 &in_dev->mc_tomb_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#163 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#163 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#163 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#163 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#163 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 &n->list_lock &c->lock irq_context: 0 
rtnl_mutex team->team_lock_key#163 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#163 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#163 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#163 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#163 console_lock console_srcu console_owner console_owner_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &nsim_trap_data->trap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#593 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock &base->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &u->iolock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#163 _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#163 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#228 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#228 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#462 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#462 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void 
*__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#236 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#678 irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex batched_entropy_u8.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex kfence_freelist_lock irq_context: 0 &nbd->config_lock console_owner_lock irq_context: 0 &nbd->config_lock console_owner irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 rcu_read_lock &ndev->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 net_rwsem 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#163 &tn->lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#672 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#672 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#672 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#672 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#672 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#672 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#674 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#674 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#674 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#369 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &base->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#698 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#698 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#746 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#677 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#677 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#677 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#678 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#681 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#681 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#236 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#465 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#469 irq_context: 0 misc_mtx pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#469 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#469 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#469 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#64 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#239 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#239 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#470 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#470 &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)bond0#165 irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#843 &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex remove_cache_srcu irq_context: 0 nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#472 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#475 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#475 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#243 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#476 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#165 irq_context: 0 rtnl_mutex team->team_lock_key#165 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#165 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#165 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem rcu_read_lock &new->lock#2 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#681 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#679 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#679 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#679 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#115 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#681 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#377 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#378 irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#165 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 _xmit_ETHER irq_context: 0 (wq_completion)events deferred_process_work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work &obj_hash[i].lock irq_context: 0 (wq_completion)events deferred_process_work pool_lock#2 irq_context: 0 rtnl_mutex 
team->team_lock_key#165 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#165 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&vmpr->work) &vmpr->sr_lock irq_context: 0 (wq_completion)events (work_completion)(&vmpr->work) &vmpr->events_lock irq_context: 0 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#50 &lru->node[i].lock irq_context: 0 &type->s_umount_key#43 &lru->node[i].lock irq_context: 0 &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &anon_vma->rwsem ptlock_ptr(ptdesc)#2 irq_context: 0 swap_slots_cache_mutex irq_context: 0 &cache->alloc_lock irq_context: 0 &cache->alloc_lock swap_avail_lock irq_context: 0 &cache->alloc_lock &p->lock#2 irq_context: 0 &cache->alloc_lock &p->lock#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &cache->alloc_lock &p->lock#2 batched_entropy_u32.lock irq_context: 0 shmem_swaplist_mutex irq_context: 0 shmem_swaplist_mutex &xa->xa_lock#23 irq_context: 0 shmem_swaplist_mutex &xa->xa_lock#23 &c->lock irq_context: 0 shmem_swaplist_mutex &xa->xa_lock#23 pool_lock#2 irq_context: 0 shmem_swaplist_mutex &info->lock irq_context: 0 shmem_swaplist_mutex &sb->s_type->i_lock_key irq_context: 0 shmem_swaplist_mutex &((cluster_info + ci)->lock)#2 irq_context: 0 shmem_swaplist_mutex &xa->xa_lock#9 irq_context: 0 &((cluster_info + ci)->lock)#2 irq_context: 0 &tree->lock irq_context: 0 &acomp_ctx->mutex irq_context: 0 &acomp_ctx->mutex scomp_scratch.lock irq_context: 0 &acomp_ctx->mutex &____s->seqcount#2 irq_context: 0 &acomp_ctx->mutex &____s->seqcount irq_context: 0 &acomp_ctx->mutex &c->lock irq_context: 0 &acomp_ctx->mutex pool_lock#2 irq_context: 0 &acomp_ctx->mutex &pool->lock#3 irq_context: 0 &acomp_ctx->mutex &pool->lock#3 &zspage->lock irq_context: 0 &acomp_ctx->mutex &zspage->lock irq_context: 0 &acomp_ctx->mutex &zspage->lock lock#10 irq_context: 0 &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &xa->xa_lock#23 irq_context: 0 &xa->xa_lock#23 &ctrl->lock#2 irq_context: 0 &type->s_umount_key#30 &lru->node[i].lock irq_context: hardirq|softirq lock#11 irq_context: 0 &type->s_umount_key#50 &dentry->d_lock irq_context: 0 &type->s_umount_key#50 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#50 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#50 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#43 &dentry->d_lock irq_context: 0 &type->s_umount_key#43 &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#43 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#43 inode_hash_lock irq_context: 0 &type->s_umount_key#43 inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#43 pool_lock#2 irq_context: 0 &type->s_umount_key#43 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#43 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#43 &dentry->d_lock &obj_hash[i].lock irq_context: 0 lock#4 lock#11 irq_context: 0 lock#4 lock#11 &lruvec->lru_lock irq_context: 0 &xa->xa_lock#23 key#29 irq_context: 0 &type->s_umount_key#30 &dentry->d_lock irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 
&lru->node[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#30 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#30 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#30 &ei->i_es_lock irq_context: 0 &type->s_umount_key#30 inode_hash_lock irq_context: 0 &type->s_umount_key#30 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#30 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 pool_lock#2 irq_context: 0 &type->s_umount_key#30 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#30 &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#30 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#30 fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 pool_lock#2 irq_context: 0 &rtpn->lock irq_context: 0 &anon_vma->rwsem mmu_notifier_invalidate_range_start irq_context: 0 &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 mmlist_lock irq_context: 0 &mm->mmap_lock &((cluster_info + ci)->lock)#2 irq_context: 0 &mm->mmap_lock &xa->xa_lock#23 irq_context: 0 &mm->mmap_lock &xa->xa_lock#23 key#29 irq_context: 0 &mm->mmap_lock &ctrl->lock#2 irq_context: 0 &mm->mmap_lock &tree->lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 &zspage->lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &zspage->lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &zspage->lock lock#10 irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex scomp_scratch.lock irq_context: 0 &mm->mmap_lock &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &vma->vm_lock->lock &((cluster_info + ci)->lock)#2 irq_context: 0 &vma->vm_lock->lock cgroup_file_kn_lock irq_context: 0 &vma->vm_lock->lock &vmpr->sr_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 percpu_charge_mutex rcu_read_lock &pool->lock irq_context: 0 percpu_charge_mutex rcu_read_lock &pool->lock 
&obj_hash[i].lock irq_context: 0 percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 percpu_charge_mutex stock_lock irq_context: 0 (wq_completion)events (work_completion)(&({ do { const void *__vpp_verify = (typeof((&memcg_stock) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&memcg_stock))) *)((&memcg_stock))); (typeof((typeof(*((&memcg_stock))) *)((&memcg_stock)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)events (work_completion)(&({ do { const void *__vpp_verify = (typeof((&memcg_stock) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&memcg_stock))) *)((&memcg_stock))); (typeof((typeof(*((&memcg_stock))) *)((&memcg_stock)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) stock_lock irq_context: 0 &vma->vm_lock->lock lock#5 irq_context: 0 &vma->vm_lock->lock &mapping->i_private_lock irq_context: 0 &tree->lock &pool->lock#3 irq_context: 0 &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: 0 &tree->lock &pool->lock#3 pool_lock#2 irq_context: 0 &tree->lock &obj_hash[i].lock irq_context: 0 &tree->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock cgroup_rstat_lock irq_context: 0 &vma->vm_lock->lock cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 key#29 irq_context: 0 &vma->vm_lock->lock &ctrl->lock#2 irq_context: 0 &vma->vm_lock->lock &tree->lock irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &pool->lock#3 irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &pool->lock#3 &zspage->lock irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &zspage->lock irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &zspage->lock lock#10 irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex scomp_scratch.lock irq_context: 0 &vma->vm_lock->lock &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 nl_table_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex &zspage->lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex &zspage->lock lock#10 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &acomp_ctx->mutex scomp_scratch.lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock lock#5 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock &((cluster_info + ci)->lock)#2 irq_context: 0 &mm->mmap_lock &xa->xa_lock#23 irq_context: 0 &mm->mmap_lock &cache->free_lock irq_context: 0 
mmlist_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &retval->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->i_mutex_dir_key#3 smack_known_lock irq_context: 0 &type->i_mutex_dir_key#3 smack_known_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 smack_known_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock &cache->free_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback &rtpn->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#165 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#165 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#165 net_rwsem pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#165 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 lock irq_context: 0 &acomp_ctx->mutex &rq->__lock irq_context: 0 &acomp_ctx->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#23 &rtpn->lock irq_context: 0 &acomp_ctx->mutex rcu_read_lock rcu_node_0 irq_context: 0 &acomp_ctx->mutex rcu_read_lock &rq->__lock irq_context: 0 &acomp_ctx->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mapping->i_mmap_rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &type->s_umount_key#43 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock cgroup_file_kn_lock irq_context: 0 &mm->mmap_lock &vmpr->sr_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_private_lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &lock->wait_lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &rq->__lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock cgroup_rstat_lock irq_context: 0 &mm->mmap_lock cgroup_rstat_lock 
per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &vma->vm_lock->lock &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock &anon_vma->rwsem ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#23 &ctrl->lock#2 irq_context: 0 &vma->vm_lock->lock &anon_vma->rwsem mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock &anon_vma->rwsem rcu_read_lock ptlock_ptr(ptdesc)#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &vma->vm_lock->lock &tree->lock &pool->lock#3 irq_context: 0 &vma->vm_lock->lock &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &tree->lock &pool->lock#3 pool_lock#2 irq_context: 0 &vma->vm_lock->lock &tree->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &tree->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex pool_lock#2 irq_context: 0 &vma->vm_lock->lock &acomp_ctx->mutex &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock &memcg->move_lock irq_context: 0 &vma->vm_lock->lock &cache->alloc_lock irq_context: 0 &xa->xa_lock#23 pool_lock#2 irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock percpu_charge_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &xa->xa_lock#23 key#29 irq_context: 0 &vma->vm_lock->lock memcg_oom_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 memcg_oom_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock css_set_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock rcu_read_lock &p->alloc_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock (console_sem).lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock freezer_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock (console_sem).lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &vma->vm_lock->lock memcg_oom_lock#2 oom_lock &base->lock irq_context: 0 
&vma->vm_lock->lock memcg_oom_lock#2 oom_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 oom_victims_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#165 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#165 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#165 &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &meta->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 lweventlist_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#165 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#165 (console_sem).lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#165 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#165 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#165 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#165 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#478 irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 &vma->vm_lock->lock lock#4 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &((cluster_info + ci)->lock)#2 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &ctrl->lock#2 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 pool_lock#2 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock &meta->lock irq_context: 0 &vma->vm_lock->lock &cache->free_lock &p->lock#2 &tree->lock kfence_freelist_lock irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#165 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#478 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#684 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#684 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#685 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#381 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 memcg_oom_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock css_set_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock rcu_read_lock &p->alloc_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock (console_sem).lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &rq->__lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock kernfs_pr_cont_lock (console_sem).lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock cgroup_rstat_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock rcu_read_lock cgroup_file_kn_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock rcu_read_lock &sighand->siglock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock &sighand->siglock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock freezer_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock (console_sem).lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner &port_lock_key 
irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &p->alloc_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &base->lock irq_context: 0 &mm->mmap_lock memcg_oom_lock#2 oom_lock &base->lock &obj_hash[i].lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&tsk->oom_reaper_timer) put_task_map-wait-type-override#4 stock_lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) stock_lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) key irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&mm->async_put_work) pool_lock#2 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#686 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#686 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#687 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#688 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4#2 
(work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &meta->lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#691 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#692 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#692 &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 cb_lock put_task_map-wait-type-override#5 pool_lock#2 irq_context: 0 cb_lock put_task_map-wait-type-override#5 stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &rtpn->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 
(wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#693 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#695 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#695 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#695 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem triggers_list_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#696 irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 sb_writers#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#385 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#115 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#700 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#700 &rq->__lock irq_context: 0 &type->s_umount_key#47 rcu_read_lock rcu_node_0 irq_context: 0 &type->s_umount_key#47 rcu_read_lock &rq->__lock irq_context: 0 &type->s_umount_key#47 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 &rcu_state.gp_wq irq_context: 0 &type->s_umount_key#47 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->s_umount_key#47 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#47 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#700 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#118 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#703 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#703 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &n->list_lock 
irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &sem->wait_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &p->pi_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci4#2 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci4#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#703 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#393 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#705 &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#707 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#707 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#390 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#708 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#708 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#708 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#397 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#710 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#710 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#710 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#712 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#399 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#674 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1134 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1134 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1134 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#679 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#675 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pool_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#675 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#675 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#672 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1134 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1134 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1132 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#238 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#236 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#235 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1135 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1135 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1135 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#478 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#245 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1135 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1135 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1133 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1133 &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1133 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#680 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#676 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#673 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1136 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1136 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1136 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1136 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1136 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &wq->mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1134 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1137 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1137 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1135 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#681 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#681 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &lo->lo_mutex rcu_node_0 irq_context: 0 &disk->open_mutex &lo->lo_mutex &rcu_state.expedited_wq irq_context: 0 &disk->open_mutex &lo->lo_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#681 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#677 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#677 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#677 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#674 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#674 &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 &c->lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex key irq_context: 0 vlan_ioctl_mutex rtnl_mutex pcpu_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex percpu_counters_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1138 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1138 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 nl_table_wait.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1138 &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock &p->lock 
remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1136 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 remove_cache_srcu irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu pool_lock#2 irq_context: 0 &f->f_pos_lock &lock->wait_lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#23 irq_context: 0 sb_writers#3 &mm->mmap_lock &((cluster_info + ci)->lock)#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &ctrl->lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &tree->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &pool->lock#3 &zspage->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &zspage->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex &zspage->lock lock#10 irq_context: 0 sb_writers#3 &mm->mmap_lock &acomp_ctx->mutex scomp_scratch.lock irq_context: 0 sb_writers#3 &mm->mmap_lock &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 sb_writers#3 &mm->mmap_lock lock#5 irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#23 key#29 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &((cluster_info + ci)->lock)#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &ctrl->lock#2 irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock rcu_read_lock &lru->node[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &pool->lock#3 pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 
&mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &xa->xa_lock#23 irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock &meta->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &cache->free_lock &p->lock#2 &tree->lock kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 &n->list_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#165 &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &n->list_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#23 &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#23 pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1139 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1139 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1137 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &retval->lock irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#172 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1140 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1140 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1140 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1140 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1140 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1140 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1138 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1138 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1138 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#682 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#682 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#682 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#682 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#682 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#682 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#678 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#678 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#675 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#480 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#482 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#67 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#243 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#487 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#492 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1141 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1141 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1139 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fs_reclaim &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#400 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#394 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#715 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#716 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#716 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#716 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex gdp_mutex &p->pi_lock irq_context: 0 rtnl_mutex gdp_mutex &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#718 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#717 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#719 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#720 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#722 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#722 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#722 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#722 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#722 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#399 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#722 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#722 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#727 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#727 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#407 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#733 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#737 irq_context: 0 misc_mtx wq_pool_mutex wq_pool_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#124 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#740 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#740 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#743 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#746 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#747 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#751 irq_context: 0 (wq_completion)wg-kex-wg2#292 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#753 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#753 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#755 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#755 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#129 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#759 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#762 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#439 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#439 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#779 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#445 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#783 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#783 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#791 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#313 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#313 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#793 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#795 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#795 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#798 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#798 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#387 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#387 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#387 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#388 irq_context: 0 (wq_completion)wg-kex-wg1#305 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)nfc2_nci_rx_wq#388 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#388 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 
irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 map_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 
(wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#314 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#388 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1142 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1142 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1140 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#683 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#683 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#679 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#676 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy338 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1143 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1143 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1143 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1143 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock 
irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#306 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1143 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1141 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#684 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#684 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#680 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#677 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1144 irq_context: 0 rcu_read_lock &sighand->siglock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1144 irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1144 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy338 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1145 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1145 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1145 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1145 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1145 irq_context: 0 (wq_completion)bond0#159 irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#159 irq_context: 0 
rtnl_mutex team->team_lock_key#159 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#159 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#159 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#159 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#159 net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem stock_lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem key irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem pcpu_lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem percpu_counters_lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &tn->lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#159 &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#159 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#159 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#159 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &ndev->lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#159 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#159 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#159 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#159 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex 
team->team_lock_key#159 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 lock irq_context: 0 rtnl_mutex team->team_lock_key#159 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#159 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#159 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#159 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#159 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#159 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#159 (console_sem).lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle lock#4 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#159 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#159 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#159 console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#159 net_rwsem &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#159 net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx fs_reclaim &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1145 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1145 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1143 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1143 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1143 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1143 &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &base->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#142 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#295 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr 
= (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#296 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy337 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#153 
(work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#387 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#387 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#302 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#387 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#303 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#389 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#389 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) 
+ 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#389 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#389 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#806 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock key#28 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 net_rwsem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 net_rwsem &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#159 &tn->lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh 
&obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#303 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#806 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#817 irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#314 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ 
do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#159 (work_completion)(&(&slave->notify_work)->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)wg-kex-wg2#302 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#823 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#826 irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#150 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#475 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#831 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &nbd->config_lock rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#836 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#847 &rq->__lock irq_context: 0 (wq_completion)hci3 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc34_nci_tx_wq#4 irq_context: 0 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#5 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &rq->__lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 pcpu_alloc_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#181 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#6 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#545 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#492 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#491 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#494 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#498 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#498 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#255 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#255 irq_context: 0 rcu_read_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#25 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#25 irq_context: 0 &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#501 irq_context: 0 &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#256 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#503 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#503 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#503 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#502 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#77 irq_context: 0 &mm->mmap_lock &cache->alloc_lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#77 &rq->__lock irq_context: 0 &mm->mmap_lock &acomp_ctx->mutex &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &memcg->move_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#77 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#77 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#76 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#75 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#504 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#78 irq_context: 0 (wq_completion)events (work_completion)(&vmpr->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&vmpr->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#78 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#77 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#507 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#260 irq_context: 0 nfc_devlist_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#260 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#260 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#260 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#261 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#262 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#509 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#511 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#511 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#511 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#513 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#513 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#266 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#513 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#516 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#516 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#268 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#517 irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#521 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#522 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#523 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#524 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock irq_context: 0 &ep->mtx &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &ep->mtx &mm->mmap_lock pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#273 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#274 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock irq_context: 0 (wq_completion)hci0#3 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &rq->__lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#3 
(work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: softirq &c->lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) 
console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#44 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#165 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) 
rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 
(wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &n->list_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock 
&obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock key irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex 
kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 
rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock 
uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex failover_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &xa->xa_lock#19 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &xa->xa_lock#19 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 &xa->xa_lock#19 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex bpf_devs_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#165 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#165 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#165 &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#165 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#165 &devlink_port->type_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#11 irq_context: 0 
&ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#8 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 &ids->rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ids->rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 &ids->rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci0#3 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci0#4 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#4 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci0#4 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1067 irq_context: 0 misc_mtx (wq_completion)nfc4_nci_cmd_wq#223 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 irq_context: 0 (wq_completion)wg-kex-wg1#299 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#639 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#526 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#85 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#84 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#83 irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#527 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy337 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#527 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#181 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#181 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#527 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#277 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#530 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#280 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#531 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1074 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#228 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#531 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#532 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#281 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#533 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#593 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#592 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#594 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#460 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#463 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#817 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#817 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#821 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#821 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#148 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#823 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#824 irq_context: 0 &mm->mmap_lock &mm->page_table_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#153 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#477 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#836 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#837 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#477 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#842 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#851 irq_context: 0 free_vmap_area_lock &meta->lock irq_context: 0 free_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci3 &rq->__lock irq_context: 0 (wq_completion)hci3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3 
(work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock 
console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 pcpu_alloc_mutex pcpu_lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock rcu_node_0 irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &x->wait#9 irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &base->lock irq_context: 0 &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#159 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 
rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rcu_read_lock &ndev->lock 
irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex bpf_devs_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#159 &____s->seqcount irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#545 irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
(wq_completion)hci3 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &n->list_lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->power_on) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock 
uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock hci_cb_list_lock.wait_lock irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &rq->__lock 
irq_context: 0 (wq_completion)hci3 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci3#2 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 cb_lock rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#851 irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) 
rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#163 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#851 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#497 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#497 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#286 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#87 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#538 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#540 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#541 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#542 irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#291 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#291 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#544 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#544 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock stock_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#544 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#545 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#287 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#285 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#293 irq_context: 0 cb_lock rtnl_mutex &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &rtpn->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#549 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#551 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#552 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#288 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#554 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#554 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#555 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#295 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#556 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#90 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#163 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 
rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex 
&sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#163 rtnl_mutex &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#558 irq_context: 0 &hdev->req_lock (wq_completion)hci3#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#559 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#559 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#298 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#560 &rq->__lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#561 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#300 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#300 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#562 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#301 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#563 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#565 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#565 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#594 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#594 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#594 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#546 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#546 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#937 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#547 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#594 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#593 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#318 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#318 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#314 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#568 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#569 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#312 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#306 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#312 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#307 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#307 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#575 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#576 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#576 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#576 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#577 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#579 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#581 irq_context: 0 (wq_completion)wg-crypt-wg2#151 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#583 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#584 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#312 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1100 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1100 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#658 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#654 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#595 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#588 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#588 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#588 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#315 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#589 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#589 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#595 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#595 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#99 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#99 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#98 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#97 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#594 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#319 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#320 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#320 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy340 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#100 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#603 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#603 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#603 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#602 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#604 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#654 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#652 irq_context: 0 (wq_completion)bond0#168 irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#168 
(work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#168 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#607 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1103 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#607 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#325 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#609 irq_context: 0 &dev->mutex uevent_sock_mutex 
&base->lock irq_context: 0 &dev->mutex uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#609 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#609 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#624 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#335 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#335 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#625 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#336 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#337 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg1#154 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#307 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#626 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#628 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#336 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#629 irq_context: 0 (wq_completion)wg-kex-wg1#289 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#317 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#632 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#108 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#108 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#107 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#638 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#638 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#638 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#346 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#644 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#644 &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#644 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#645 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#646 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#645 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#646 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#646 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 
0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#650 irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#163 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#650 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#650 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#650 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#649 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#354 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#111 irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#652 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#652 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#652 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#653 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#655 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#656 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#110 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#358 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#358 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#354 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#659 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#659 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#659 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#658 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#660 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#660 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#661 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#660 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#662 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#662 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#362 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#362 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#358 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#356 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#664 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#665 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#666 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#361 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#366 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#362 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#668 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#367 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#669 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#668 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#671 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#596 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#596 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#595 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#725 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#729 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#730 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#730 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#731 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#733 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#297 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 misc_mtx remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#307 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#733 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#735 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#735 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#735 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#734 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#737 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#737 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#740 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#740 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#740 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#739 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#742 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#742 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#742 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#126 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#126 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#741 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#125 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#32 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#743 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#743 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#33 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#33 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#32 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#32 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#32 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#744 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#744 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#746 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#420 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#747 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#749 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#751 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#751 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#751 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#751 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#424 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#424 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#427 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#757 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#757 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#759 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#758 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#430 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#761 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#761 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#762 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#175 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#178 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#544 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#544 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#763 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#764 irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 misc_mtx fs_reclaim 
mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#434 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#434 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#767 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#767 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#436 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#436 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#432 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#430 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#544 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#542 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#768 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#768 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#768 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#431 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#942 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#769 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#772 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#549 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#441 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#434 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#775 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#774 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#441 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#777 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#778 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#779 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#779 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#445 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#781 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#782 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#782 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#784 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#785 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#786 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#786 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#787 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#791 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#791 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#788 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#787 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#448 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#790 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#450 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#450 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#792 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#793 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#451 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#795 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#795 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#795 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#794 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#452 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#797 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#797 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#797 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#798 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#798 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#798 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#797 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#801 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#801 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#801 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#456 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#456 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#803 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#803 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#806 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#805 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#453 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#460 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#460 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#809 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#334 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#330 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#328 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#624 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1103 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#624 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#623 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#331 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#331 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#329 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#625 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#625 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#624 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#336 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#332 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#330 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc39_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#3 &rq->__lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc36_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#337 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#333 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#331 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#331 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#179 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#179 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#178 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#556 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#809 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#36 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#36 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#35 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#462 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#810 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#810 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#461 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#812 &rq->__lock irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#169 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#812 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#463 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#458 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#817 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#816 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#816 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#146 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#466 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#466 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#462 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#460 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#820 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#148 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#824 &rq->__lock irq_context: 0 &dev->mutex kn->active#4 rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex kn->active#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 
0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#826 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#826 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#826 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#472 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#827 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#829 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#829 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#474 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#474 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#829 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#828 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#475 &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->page_table_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->page_table_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#832 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#833 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#477 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#472 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#835 irq_context: 0 &dev->mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 &dev->mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 &dev->mutex uevent_sock_mutex &meta->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#838 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#839 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#839 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#840 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#483 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#153 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#153 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#153 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#842 &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->mutex &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#842 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#154 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#843 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#486 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#486 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#843 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#845 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#845 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#845 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#844 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#488 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#847 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#847 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#156 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#490 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#849 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#849 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#850 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#850 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#851 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#853 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#491 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#489 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#854 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#854 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#496 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#496 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#854 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#856 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#497 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#856 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#857 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#857 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#857 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#857 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#499 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#495 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#859 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#860 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#554 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#178 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#944 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#180 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#40 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#555 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#555 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#556 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#552 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#951 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#951 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#951 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#182 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#953 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#958 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#961 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#569 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#569 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#569 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#569 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#971 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#572 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#572 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#976 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#980 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#980 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#577 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#981 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#989 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#989 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#992 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#992 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#992 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1008 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#106 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#105 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#104 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#104 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#104 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#626 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#626 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#338 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#338 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#559 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#954 irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#24 &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#955 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#183 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#956 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#957 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#957 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#957 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#957 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#961 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#961 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#961 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#961 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#186 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#184 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#567 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#966 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#188 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#186 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#969 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#969 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#969 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#971 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#568 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#189 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#973 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#973 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#973 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#569 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#978 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#978 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#981 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#981 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1103 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#983 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#983 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#983 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#577 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#575 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#985 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#985 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#583 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#585 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#994 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#993 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#995 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#995 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#582 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#997 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#998 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#998 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#197 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#999 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#999 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#999 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#590 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1001 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1001 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#593 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1003 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1003 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#588 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1004 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1006 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1006 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#589 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1007 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1008 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1008 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1008 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#597 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#46 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1012 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1012 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1014 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1017 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1020 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1020 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#598 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#202 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#202 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#201 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#200 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1020 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1022 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#606 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#606 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1022 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#602 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#607 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#607 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#607 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#607 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1023 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1023 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#303 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1025 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1029 irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1030 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1030 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#48 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1030 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1032 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1032 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#613 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#613 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#207 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#207 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#209 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#209 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#50 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#210 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#210 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#210 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1036 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#51 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#614 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1038 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1041 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1041 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#616 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1043 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1045 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#8 irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci3#2 
(work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1070 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1070 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#595 &rq->__lock irq_context: 0 nf_sockopt_mutex &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1103 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1103 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#215 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#215 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1047 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1048 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1048 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#619 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1049 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1146 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1146 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1144 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#216 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#216 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1053 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1053 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#629 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#629 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#625 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#625 &rq->__lock 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1055 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1055 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#631 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#627 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#626 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#633 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1061 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1061 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1061 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#228 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1062 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1062 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1082 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1083 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1062 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#54 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#7 &rq->__lock irq_context: 0 (wq_completion)hci3#4 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#388 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#181 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#178 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#179 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#9 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1085 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1103 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1103 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1101 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1101 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#9 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#217 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#217 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#636 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#636 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#221 irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#20 irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem 
rtnl_mutex &wg->device_update_lock &rcu_state.expedited_wq irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx system_transition_mutex (pm_chain_head).rwsem rtnl_mutex &wg->device_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#595 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#597 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#597 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#597 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#319 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#596 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#315 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#313 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#598 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#598 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#598 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#598 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#598 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#597 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#316 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#314 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#599 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#599 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#599 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#598 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#600 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#600 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#600 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#599 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#100 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#321 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#321 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#317 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#99 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#315 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#98 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#98 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy339 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#601 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#601 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#600 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy339 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#602 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#662 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1101 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#602 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#602 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#602 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#601 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#322 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#318 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#316 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy342 irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#23 &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy342 &rq->__lock irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#604 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#604 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#604 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#603 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy342 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#605 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#605 irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#659 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#655 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#605 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#604 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#323 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#323 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#319 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#319 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#319 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#317 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#606 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#606 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#605 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#605 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#605 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#324 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#324 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#324 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#324 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#101 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#101 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#320 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#318 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#100 irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#100 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#100 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#99 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#607 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#606 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#325 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#321 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#321 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#321 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#319 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#319 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#319 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#608 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#608 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#608 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#607 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#607 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#607 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#102 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#102 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#101 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#100 irq_context: 0 &dev->mutex rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#609 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#609 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#609 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#609 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#656 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#608 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#334 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#626 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#308 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#625 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#332 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#627 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#627 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#627 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#627 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#627 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#626 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#626 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#610 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#610 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#610 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#610 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#609 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#339 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#339 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#335 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#326 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#326 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#322 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#320 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#611 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#610 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#610 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#612 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#612 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#224 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#638 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1067 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#641 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#611 irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1073 irq_context: 0 (wq_completion)wg-kex-wg1#305 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#644 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#644 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#645 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#645 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1078 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1079 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1080 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1082 irq_context: 0 bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1086 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1086 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc2_nci_rx_wq#1087 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1087 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1088 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#227 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1089 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1089 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1089 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#652 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#652 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#652 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc3_nci_rx_wq#655 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#655 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1091 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1104 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1104 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1104 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1104 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1104 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1102 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 fs_reclaim irq_context: 0 (wq_completion)nfc15_nci_tx_wq#11 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy342 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1093 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#9 irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1096 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#654 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy341 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1105 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1105 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1103 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1097 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1106 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#657 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1106 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1104 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#661 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#661 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#661 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#661 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#657 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#657 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#655 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#655 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#655 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1107 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1107 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1107 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1107 &rq->__lock irq_context: softirq (&peer->timer_retransmit_handshake) &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) &list->lock#14 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh &base->lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg0#304 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1109 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1110 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1110 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1110 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1108 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#664 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1112 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc42_nci_cmd_wq irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc42_nci_rx_wq irq_context: 0 (wq_completion)nfc42_nci_tx_wq irq_context: 0 misc_mtx (wq_completion)nfc44_nci_cmd_wq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#13 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#306 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#309 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#310 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1117 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1115 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1118 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1118 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1118 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1120 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1120 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#170 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1121 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#662 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#660 irq_context: 0 (wq_completion)hci0#5 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#663 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1124 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1122 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#230 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1126 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#662 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#172 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#13 irq_context: 0 rtnl_mutex team->team_lock_key#172 irq_context: 0 rtnl_mutex team->team_lock_key#172 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1127 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#672 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#672 irq_context: 0 
&ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1128 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#14 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)nfc10_nci_rx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1144 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_rx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1130 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1130 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1144 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &dir->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#19 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#19 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy337 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#172 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex key irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex pcpu_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex percpu_counters_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex pcpu_lock stock_lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_tx_wq#16 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_rx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_rx_wq#10 &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_read_lock &obj_hash[i].lock irq_context: 0 &u->iolock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 
irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci4 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#297 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#165 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#165 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#165 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#166 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#166 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#166 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#38 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#38 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#37 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#169 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#169 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#169 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#166 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#167 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#167 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#168 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#171 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#171 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#168 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#169 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#169 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#169 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#170 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#170 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#171 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#174 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#171 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#172 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#390 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#390 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#390 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#390 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#389 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#42 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#390 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#42 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#17 
irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#182 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#182 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#179 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#180 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#41 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#392 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#183 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#183 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#183 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#392 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#391 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#183 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#180 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#181 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#42 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#42 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#42 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#393 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#393 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#392 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#392 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#184 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#184 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#181 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#181 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#181 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#182 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#44 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#394 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#394 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#394 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#393 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#395 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#395 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#395 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#395 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#395 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#394 irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#185 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#185 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#185 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#185 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#182 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#183 irq_context: 0 misc_mtx pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx pcpu_alloc_mutex 
&obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#396 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#396 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#396 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#396 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#396 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#395 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#397 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#397 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#396 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#186 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#186 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#183 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#184 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#398 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#397 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#187 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#187 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#184 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#185 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#188 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#399 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#399 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#399 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#398 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#189 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#189 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#186 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#186 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#401 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#401 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#401 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#401 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#400 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#402 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#402 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#402 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#402 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#401 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#190 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#190 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#187 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#187 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#403 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#403 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#402 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#191 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#191 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#191 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#404 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#403 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#188 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#188 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#405 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#192 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#404 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#189 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#189 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#45 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#44 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#406 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#406 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#406 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#406 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#405 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#407 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#407 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#193 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#193 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#190 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#190 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#409 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#409 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#408 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#194 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#194 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#191 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#191 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#191 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#410 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#410 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#410 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#410 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#409 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#411 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#411 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#411 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#410 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#410 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#410 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#195 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#195 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#192 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#192 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#412 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#412 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#411 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#411 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#411 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#196 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#46 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#193 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#193 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#193 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#193 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#46 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#413 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#413 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#46 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#413 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#45 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#412 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#412 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#412 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#414 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#414 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#414 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#414 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#414 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#413 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#413 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#198 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#198 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#198 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#198 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#194 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#194 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#194 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#194 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#47 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#47 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#47 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#46 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#195 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#48 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#48 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#48 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#48 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#48 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#47 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#415 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#415 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#415 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#415 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#415 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#414 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#18 irq_context: 0 misc_mtx pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#196 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#196 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#196 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#417 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#417 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#417 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#417 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#417 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#416 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#49 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#49 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#49 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#48 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#418 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#418 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#418 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#418 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#418 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#417 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#50 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#49 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#419 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#201 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#201 irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#197 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#197 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#197 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#197 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#52 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#52 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#51 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#50 irq_context: 0 (wq_completion)wg-kex-wg2#292 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#298 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock rlock-AF_PACKET irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 
(wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#149 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#20 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#20 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#424 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#424 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#204 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#204 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#204 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#423 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#204 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#200 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#200 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#425 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#425 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#424 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#205 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#201 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#426 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#201 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#426 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#426 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#425 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#206 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#206 irq_context: 0 &f->f_pos_lock pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &pcp->lock &zone->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#427 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#203 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#427 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#427 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#427 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#427 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#427 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#427 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#203 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#203 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#203 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#426 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#426 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#426 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#208 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#428 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#428 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#428 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#428 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#428 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#428 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#209 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#209 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#205 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#427 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#204 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#55 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#54 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#53 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#429 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#429 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#429 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#428 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#210 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#210 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#206 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#205 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#430 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#430 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#429 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#211 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#211 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#206 irq_context: 0 misc_mtx pcpu_alloc_mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#212 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#212 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#208 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#207 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#431 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#431 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#213 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#213 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#431 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#430 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#209 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#209 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#56 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#56 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#208 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#56 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#55 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#54 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#432 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#432 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#432 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#432 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#431 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#214 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#214 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#214 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#214 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#210 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#209 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#434 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#434 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#434 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#434 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#434 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#433 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#433 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#215 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#215 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#215 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#215 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#211 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#435 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#435 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#434 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#434 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#216 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#216 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#212 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#212 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#212 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#436 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#436 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#436 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#436 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#435 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#213 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#213 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#213 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#213 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#212 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#56 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#55 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#55 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#55 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#437 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#437 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#437 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#436 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#436 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#436 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#218 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#218 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#214 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#213 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#58 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#58 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#58 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#58 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#57 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#57 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#57 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#56 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#438 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#438 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#438 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#437 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#437 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#439 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#439 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#438 irq_context: 0 &type->i_mutex_dir_key#4 &meta->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#219 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#219 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#215 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#214 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#440 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#440 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#440 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#440 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#440 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#440 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#440 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#439 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#439 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#439 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#220 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#216 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#216 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#216 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#215 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#442 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#442 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#442 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#441 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#221 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#221 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#217 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#216 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#443 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#443 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#443 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#442 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#218 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#217 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#217 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#217 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#223 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#223 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#219 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#219 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#219 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#218 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#59 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#59 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#58 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#57 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#445 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#445 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#220 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#220 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#224 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#224 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#224 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#446 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#446 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#446 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#445 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#225 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#225 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#225 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#225 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#219 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#447 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#447 
&rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#447 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#447 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#447 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#446 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#446 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#446 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#222 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#220 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#448 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#448 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#448 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#447 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#449 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#449 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#449 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#448 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#448 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#448 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#227 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#227 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#223 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#223 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#223 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#221 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#450 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#450 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#450 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#449 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#449 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#60 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#58 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#451 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#451 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#451 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#451 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#451 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#450 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#452 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#452 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#452 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#451 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#228 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#224 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#224 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#224 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#222 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#453 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#61 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#61 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#453 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#453 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#453 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#452 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#60 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#59 irq_context: 0 &dev->mutex subsys mutex#40 &lock->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &lock->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#229 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#229 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#225 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#223 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#453 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#230 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#230 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#226 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#224 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#61 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#60 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#454 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#231 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#231 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#227 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#225 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#456 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#456 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#455 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#226 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#457 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#457 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#457 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#456 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#458 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#458 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#458 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#458 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#457 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#459 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#459 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#459 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#458 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#460 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#460 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#460 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#460 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#459 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#229 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#229 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#227 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#461 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#461 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#461 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#460 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#234 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#234 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#230 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#228 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#462 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#461 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#461 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#461 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#235 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#235 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#235 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#235 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#231 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#229 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#63 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#63 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#63 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#463 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#463 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#63 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#463 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#462 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#462 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#462 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#62 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#61 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#61 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#61 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#236 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#236 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#232 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#230 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#464 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#463 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#465 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#465 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#465 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#465 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#464 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#466 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#466 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#466 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#465 irq_context: softirq (&peer->timer_retransmit_handshake) &list->lock#14 pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#467 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#466 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#237 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#237 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#64 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#64 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#63 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#233 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#62 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#231 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#22 &rq->__lock irq_context: 
0 (wq_completion)nfc5_nci_rx_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#468 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#468 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#468 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#467 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#469 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#468 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#470 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#470 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#470 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#238 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#238 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#234 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#234 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#232 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#232 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#232 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#65 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#65 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#63 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#63 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#63 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#471 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#471 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#471 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#471 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#471 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#471 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#471 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#470 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#235 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#233 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#472 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#472 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#471 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#240 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#240 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#236 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#234 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#473 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#473 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#473 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#472 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#241 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#241 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#241 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#241 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#237 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#237 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#237 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#235 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#474 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#474 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#473 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#242 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#242 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#242 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#242 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#238 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#236 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#475 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#475 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#475 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#475 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#475 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#474 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#243 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#476 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#476 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#476 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#476 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#239 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#476 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#237 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#475 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#66 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#66 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#65 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#64 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#477 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#476 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#477 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#479 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#479 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#479 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#478 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#244 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#244 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#67 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#67 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#67 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#67 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#66 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#65 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#65 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#65 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#240 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#240 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#240 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#238 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#238 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#238 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#480 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#480 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#480 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#480 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#479 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#245 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#245 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#245 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#241 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#241 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#241 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#239 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#239 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#239 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#481 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#481 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#481 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#480 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#68 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#68 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#66 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#482 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#482 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#482 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#482 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#481 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#481 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#246 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#246 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#246 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#246 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#246 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#246 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#242 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#483 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#483 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#483 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#240 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#482 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#69 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#69 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#68 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#68 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#68 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#24 irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#67 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#67 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#67 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#484 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#484 &rq->__lock 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#483 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#247 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#247 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#241 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#485 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#485 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#485 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#484 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#70 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#70 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#248 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#69 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#248 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#248 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#244 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#242 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#486 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#486 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#485 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#485 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#487 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#487 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#486 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#245 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#243 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#488 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#488 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#488 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#487 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#71 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#71 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#70 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#70 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#69 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#489 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#489 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#489 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#488 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#490 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#490 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#490 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#490 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#489 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#489 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#489 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#250 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#250 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#250 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#250 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#246 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#246 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#246 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#72 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#244 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#72 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#244 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#244 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#71 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#70 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#247 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#245 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#491 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#491 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#491 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#490 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#73 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#73 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#72 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#71 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#493 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#493 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#493 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#493 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#493 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#492 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#252 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#252 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#248 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#246 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#494 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#494 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#493 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#253 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#253 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#249 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#247 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#495 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#495 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#496 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#496 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#496 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#496 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#496 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#496 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#495 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#254 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#254 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#254 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#250 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#248 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#497 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#497 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#74 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#74 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#497 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#497 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#497 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#73 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#72 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#496 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#498 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#497 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#251 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#251 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#499 irq_context: 
0 (wq_completion)nfc2_nci_cmd_wq#499 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#499 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#249 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#498 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#498 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#75 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#75 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#74 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#73 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem key irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#24 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#500 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#500 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#500 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#499 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#501 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#501 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#500 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#256 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#252 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#250 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#502 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#502 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#502 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#502 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#502 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#501 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#76 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#75 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#74 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#504 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#504 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#503 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#503 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#503 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#257 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#257 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#257 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#257 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#253 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#253 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#251 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#6 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 
&net->packet.sklist_lock pool_lock#2 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#25 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#25 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#505 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#505 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#505 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#505 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#505 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#504 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#258 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#258 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#258 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#254 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#252 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#78 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#78 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#76 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#506 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#506 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#506 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#505 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#259 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#259 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#255 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#253 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#253 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#26 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#26 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#26 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#26 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#507 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#507 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#506 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#506 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#79 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#79 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#78 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#77 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#256 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#254 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#80 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#80 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#79 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#78 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#78 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#78 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#261 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#257 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#255 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#508 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#508 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#508 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#507 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#507 &rq->__lock 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#507 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#81 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#81 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#80 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#79 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#262 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#262 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#262 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#258 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#256 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#263 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#263 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#259 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#257 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#509 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#509 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#508 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#264 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#264 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#260 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#258 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#510 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#509 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#511 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#511 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#510 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#265 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#265 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#265 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#265 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#261 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#261 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#261 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#259 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#82 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#82 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#81 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#80 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#80 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#80 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#512 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#512 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#512 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#511 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#513 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#513 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#512 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#266 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#266 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#266 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#262 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#260 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#514 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#514 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#514 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#267 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#267 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#263 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#515 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#515 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#261 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#515 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#514 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#516 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#516 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#516 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#515 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#268 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#268 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#268 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#264 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#264 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#264 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#262 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#517 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#517 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#517 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#516 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#518 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#518 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#518 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#518 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#518 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#269 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#269 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#265 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#263 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#270 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#270 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#270 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#519 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#270 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#270 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#270 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#518 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#266 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#264 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#264 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#264 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#271 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#267 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#265 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#83 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#83 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#82 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#81 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#520 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#520 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#520 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#519 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#521 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#521 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#521 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#520 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#520 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#272 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#272 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#272 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#272 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#268 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#266 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#521 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#84 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#84 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#84 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#84 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#83 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#83 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#83 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#82 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#523 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#522 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#524 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#524 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#523 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#273 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#269 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#267 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#267 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#525 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#525 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#525 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#524 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#270 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#268 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#526 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#526 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#526 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#526 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#526 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#526 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#525 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#275 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#275 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#271 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#271 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#271 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#85 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#85 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#85 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#269 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc5_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#27 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#527 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#276 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#276 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#272 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#527 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#270 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#526 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#277 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#273 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#271 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#528 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#528 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#528 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#527 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#86 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#86 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#86 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#86 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#85 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#84 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#274 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#272 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#272 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#529 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#529 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#529 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#528 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#530 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#530 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#530 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#530 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#529 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#279 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#279 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#275 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#273 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#273 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#273 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#280 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#276 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#274 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#531 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#531 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#530 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#87 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#87 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#86 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#85 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#85 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#85 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#532 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#532 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#531 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#281 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#281 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#277 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#275 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#282 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#282 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#278 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#276 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#276 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#276 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#533 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#533 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#533 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#533 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#533 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#532 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#532 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#532 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#88 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#88 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#88 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#88 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#87 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#86 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#283 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#283 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#279 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#277 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#534 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#534 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#284 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#284 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#280 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#278 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#535 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#535 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#535 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#535 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#281 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#279 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#535 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#534 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#286 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#282 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#280 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#536 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#535 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#89 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#89 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#88 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#87 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#87 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#537 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#537 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#287 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#537 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#536 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#536 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#536 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#287 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#536 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#536 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#283 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#283 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#283 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#281 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#538 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#538 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#538 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#537 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#288 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#288 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#284 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#284 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#284 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#282 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#539 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#539 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#539 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#539 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#539 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#539 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#538 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#538 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#538 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#540 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#540 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#541 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#541 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#541 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#541 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#541 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#541 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#540 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#540 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#540 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#289 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#289 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#285 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#283 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#283 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#283 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#542 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#542 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock 
pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#541 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#90 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#90 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#90 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#90 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#89 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#88 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#88 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#88 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#28 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#28 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#28 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#543 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#543 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#543 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#543 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#542 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#543 irq_context: 0 &xt[i].mutex &mm->mmap_lock pool_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &base->lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#545 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#545 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#545 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#545 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#544 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#290 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#290 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#286 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#284 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#546 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#546 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#546 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#546 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#545 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#545 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#545 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#291 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#547 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#547 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#546 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#292 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#292 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#288 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#548 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#548 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#286 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#548 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#548 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#286 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#548 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#286 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#547 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#549 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#549 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#549 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#549 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#289 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#289 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#289 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#548 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#548 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#548 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#287 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#91 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#91 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#90 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#89 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#550 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#550 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#550 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#549 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#551 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#551 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#550 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#552 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#552 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#552 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#552 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#551 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#551 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#294 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#294 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#290 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#290 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#553 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#553 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#553 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#553 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#552 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#552 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#552 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#554 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#554 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#554 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#553 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#555 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#555 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#554 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#295 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#291 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#289 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#296 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#296 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#556 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#556 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#292 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#556 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#290 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#92 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#92 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#91 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#555 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#557 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#557 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#297 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#297 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#556 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#93 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#93 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#91 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#293 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#291 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#558 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#558 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#558 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#558 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#557 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#559 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#559 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#559 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#558 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#298 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#298 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#298 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#294 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#292 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#560 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#560 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#560 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#559 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#299 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#299 irq_context: 0 (wq_completion)hci2 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock 
&pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci2#2 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) 
&hdev->req_wait_q &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#295 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#295 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#295 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#293 irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci2#2 
(work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock 
&hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci2 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock 
irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci2#2 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci2#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#561 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#561 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#561 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#561 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#560 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#296 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#294 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#94 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#94 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#93 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#92 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#562 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#562 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#561 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#301 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#297 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#297 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#297 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#295 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#563 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#563 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#562 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#302 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#302 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#298 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#296 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#564 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#564 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#303 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#303 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#564 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#564 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#299 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#563 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#565 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#565 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#565 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#565 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#565 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#564 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#564 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &x->wait#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#327 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#327 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#327 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#327 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#323 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#321 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#321 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#321 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#103 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#103 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#102 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#101 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#328 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#328 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#328 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#324 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#324 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#322 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#613 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#613 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#612 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#614 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#614 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#613 irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#284 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#615 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#616 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#333 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#628 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#628 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg0#160 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#627 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#315 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#340 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#340 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#334 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1107 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1105 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#629 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#629 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#629 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#629 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#628 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#341 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#341 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#337 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#335 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#630 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#630 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#630 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#342 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#338 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#336 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#616 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#616 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#615 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#615 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#329 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#329 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#325 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#325 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#325 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#323 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#304 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#304 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#330 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#330 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#330 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#617 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#617 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#617 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#326 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#324 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#617 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#324 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#616 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#104 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#103 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#102 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#618 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#618 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#617 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#331 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#327 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#327 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#327 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#325 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#619 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#619 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#618 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#332 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#328 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#326 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#620 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#620 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#620 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#621 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#621 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#105 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#300 
irq_context: 0 (wq_completion)nfc3_nci_tx_wq#298 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#105 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#104 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#103 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#620 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#629 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#169 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#631 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#631 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#333 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#333 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#329 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#631 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#631 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#631 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#630 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#317 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#317 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#632 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#632 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#309 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#318 irq_context: 0 
(wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#309 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; 
__ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#306 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#566 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#566 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#566 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#565 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; 
} while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#160 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#567 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#567 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#567 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#567 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#566 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) 
+ 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#310 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#568 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#568 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#305 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#305 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#567 irq_context: 0 (wq_completion)wg-kex-wg1#310 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#301 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#299 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 
rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#310 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#569 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#569 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#155 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#152 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#569 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#569 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#568 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#632 irq_context: 0 (wq_completion)wg-crypt-wg0#161 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#570 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#570 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#569 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#571 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#571 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#571 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#570 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#570 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#570 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#307 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 
(wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#572 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#572 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#572 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#572 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#572 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#572 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#572 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#571 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#306 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#306 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#306 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#302 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#300 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 
(wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#170 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#632 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#631 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#306 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#343 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#343 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#339 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#337 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#107 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#107 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#106 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#105 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#633 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#633 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#632 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#634 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#634 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#633 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#344 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#344 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#344 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#344 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#340 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#635 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#634 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#338 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#106 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#636 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#636 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#636 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#635 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#635 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#635 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#637 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#637 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#637 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#636 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#638 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#638 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#637 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#637 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#637 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#345 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#345 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#341 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#341 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#339 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#639 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_tx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#639 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#639 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#638 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#109 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#9 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#573 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#573 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#573 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#572 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#303 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#301 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#109 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#108 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#107 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#346 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#346 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#346 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#342 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#574 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#574 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#574 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#574 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#573 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#575 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#575 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#575 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#575 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#308 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#308 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#574 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#304 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#302 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#576 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#575 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#577 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#577 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#576 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#578 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#578 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#578 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#577 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#579 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#579 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#579 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#579 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#578 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#309 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#309 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#309 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#309 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#305 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#305 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#305 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#580 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#580 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#580 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#580 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#579 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#579 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#579 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#310 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#310 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#306 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#304 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#581 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#311 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#311 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#311 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#582 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#582 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#582 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#311 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#307 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#581 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#305 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#340 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#640 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#583 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#583 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#639 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#347 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#347 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#347 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#347 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#343 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#341 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#641 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#641 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#641 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#641 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#640 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#640 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#348 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#348 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#344 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#342 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#642 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#642 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#641 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#349 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#349 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#345 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#343 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#643 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#643 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#643 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#642 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#644 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#644 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#350 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#583 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#582 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#582 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#582 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#350 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#346 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#344 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#643 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#645 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#645 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#645 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#645 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#644 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#646 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#646 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#351 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#351 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#347 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#345 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#345 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#345 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#647 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#647 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#647 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#647 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#647 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#647 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#647 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#646 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#352 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#348 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#346 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#648 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#648 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#648 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#647 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#353 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#353 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#353 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#353 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#349 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#347 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#110 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#110 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#110 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#110 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#109 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#108 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#108 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#108 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#31 irq_context: 0 
(wq_completion)nfc5_nci_cmd_wq#31 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#30 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#649 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#649 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#649 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#648 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#354 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#350 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#348 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#651 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#651 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#651 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#651 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#651 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#650 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#650 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#650 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#111 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#110 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#109 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#652 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#652 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#651 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#653 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#653 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#653 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#652 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#355 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#355 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#351 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#349 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#654 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#654 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#654 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#654 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#654 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#356 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#356 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#352 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#655 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#655 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#655 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#655 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#654 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#357 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#357 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#656 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#655 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#353 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#351 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#112 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#112 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#111 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#657 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#657 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#657 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#656 
irq_context: 0 (wq_completion)nfc3_nci_tx_wq#352 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#658 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#658 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#658 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#658 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#658 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#657 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#359 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#359 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#359 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#359 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#355 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#353 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#660 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#660 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#660 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#360 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#356 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#356 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#356 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#659 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#354 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#113 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#113 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#112 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#111 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#660 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#660 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#361 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#361 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#357 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#355 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#662 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#662 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#662 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#662 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#312 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#312 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#312 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#583 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#312 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#308 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#306 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#306 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#306 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#661 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#663 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#662 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#363 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#359 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#357 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#664 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#664 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#663 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#364 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#364 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#364 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#364 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#360 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#358 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#665 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#665 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#665 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#665 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#95 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#95 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#94 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#93 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#664 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#664 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#664 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#666 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#666 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#666 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#666 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#665 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#365 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#365 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#361 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#361 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#359 irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#667 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#667 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#667 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#360 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#668 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#668 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#668 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#668 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#667 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#667 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#667 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#367 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#363 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#585 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#585 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#585 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#313 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#313 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#313 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#313 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#309 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#309 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#309 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#307 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#96 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#363 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#361 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#361 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#368 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#368 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#364 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 &rq->__lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#96 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#96 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#96 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#95 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#94 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#586 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#586 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#586 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#585 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#670 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#670 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#670 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#669 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#671 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#671 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#671 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#671 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#670 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#369 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#369 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#369 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#369 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#365 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#363 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#672 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#672 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#671 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#671 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#671 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#370 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#370 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#366 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#364 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#114 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#114 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#114 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#114 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#113 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#587 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#587 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#587 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#113 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#97 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#97 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#113 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#112 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#587 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#96 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#95 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#586 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#673 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#673 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#673 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#673 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#371 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#314 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#314 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#310 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#29 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#308 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#29 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#29 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#371 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#367 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#365 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#674 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#674 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#673 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#673 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#372 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#372 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#368 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#366 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#366 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#366 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#366 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#675 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#587 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#315 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#675 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#675 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#674 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#373 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#373 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#369 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#369 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#367 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#676 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#311 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#309 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#676 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#676 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#675 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#677 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#154 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#662 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#662 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#662 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#658 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#658 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#656 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1108 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1108 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1108 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1106 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1109 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1109 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1109 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1109 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1107 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#659 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1111 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1111 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1111 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1111 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1109 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#660 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#658 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#658 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#658 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1112 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1112 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1110 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#12 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#589 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#589 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#589 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_tx_wq#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#588 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#10 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc24_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#10 irq_context: 0 rtnl_mutex 
dev_addr_sem team->team_lock_key#171 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc23_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#11 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc19_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#11 irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#13 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &____s->seqcount irq_context: 0 (wq_completion)nfc20_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#316 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#316 
irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc36_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc36_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1113 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1113 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1113 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1113 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1113 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1113 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1111 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1114 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1114 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1114 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1114 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc42_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1114 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc42_nci_rx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1112 irq_context: 0 (wq_completion)nfc42_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1112 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1112 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc43_nci_cmd_wq irq_context: 0 (wq_completion)nfc43_nci_rx_wq irq_context: 0 (wq_completion)nfc43_nci_tx_wq irq_context: 0 (wq_completion)nfc43_nci_tx_wq &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc41_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc41_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc41_nci_rx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc41_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc41_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#312 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#677 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#676 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#374 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#374 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#370 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#368 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#678 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#678 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#678 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#678 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#677 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#375 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#375 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#369 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#678 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#376 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#376 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#372 irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#370 irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#115 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#680 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#680 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#680 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#115 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#679 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#114 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#679 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#679 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#114 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#114 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#679 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#679 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#681 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#680 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#377 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#373 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#373 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#373 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#371 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#682 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#682 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#682 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#681 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#681 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#378 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#374 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#372 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#683 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#683 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#683 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#683 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#683 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#682 irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock stock_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock key irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock pcpu_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock percpu_counters_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#379 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#379 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#375 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#373 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#684 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#683 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#380 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#380 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#376 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#376 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#376 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#374 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#685 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#685 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#684 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#381 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#377 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#377 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#377 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#375 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#686 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#686 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#686 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#685 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#687 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#382 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#382 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#687 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#378 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#376 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#686 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#688 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#688 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#688 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#688 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#687 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#687 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#687 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#383 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#383 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#379 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#379 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#379 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#377 irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#689 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#689 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#689 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#688 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#688 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#688 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#384 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#380 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#690 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#690 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#690 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#689 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#385 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#385 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#379 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#691 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#691 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#691 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#690 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#692 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#692 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#692 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#691 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#691 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#691 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#386 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#386 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#382 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#380 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#693 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#693 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#693 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#693 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#692 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#387 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#387 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#694 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#694 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#694 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#693 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#383 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#381 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#694 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#388 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#388 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#384 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#382 irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#155 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#696 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#696 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#116 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#116 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#115 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#695 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#114 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#389 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#389 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#389 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#389 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#383 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#697 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#697 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#697 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#697 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#697 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#696 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#698 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#698 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#698 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#697 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#697 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#697 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#390 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#390 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 
(wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#386 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#384 irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#290 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#699 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#699 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#699 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#698 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#117 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#117 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#116 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#115 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#700 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#700 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#700 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#700 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#699 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#391 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#391 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#387 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#387 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#387 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#701 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#701 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#701 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#701 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#701 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#701 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#701 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#700 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#118 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#117 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#702 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#702 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#702 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#701 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#702 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#392 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#392 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#388 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#388 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#388 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#386 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#704 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#704 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#704 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#704 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#704 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#703 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#119 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#119 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#16 &rq->__lock irq_context: 0 misc_mtx &dev->mutex rcu_node_0 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#118 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#117 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#117 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#117 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#393 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#393 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#393 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#389 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#387 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#387 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#387 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#705 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#705 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#705 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#705 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#705 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#704 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#394 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#394 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#390 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#388 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#388 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#388 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#395 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#391 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#706 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#706 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#389 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#706 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#705 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#705 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#705 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#707 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#706 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#396 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#396 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#396 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#396 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#392 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#392 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#707 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#397 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#397 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#397 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#393 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#393 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#393 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#391 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#709 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#709 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#709 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#709 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#709 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#709 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#709 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#708 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#710 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#710 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#710 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#709 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#398 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#398 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#394 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#392 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#392 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#711 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#711 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#711 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#711 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#711 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#710 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#712 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#712 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#712 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#712 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#711 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#399 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#399 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#399 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#395 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#393 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#713 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#713 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#713 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#713 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#713 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#712 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#396 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#714 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#714 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#714 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#713 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#715 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#715 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#715 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#715 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#714 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#401 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#401 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#397 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#395 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#716 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#716 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#715 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#402 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#402 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#402 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#717 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#717 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#402 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#398 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#717 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#717 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#717 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#396 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#716 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#716 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#716 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#120 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#120 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#120 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#120 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#119 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#118 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#718 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#718 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#718 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#718 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#719 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#719 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#718 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#403 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#403 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#399 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#121 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#399 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#121 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#399 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#120 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#120 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#120 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#119 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#397 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#720 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#720 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#720 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#720 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#719 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#719 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#719 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#404 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#404 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#404 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#404 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#400 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#398 irq_context: 0 &dev->mutex kn->active#4 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#721 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#721 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#721 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#720 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#405 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#405 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#405 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#721 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#405 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#401 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#122 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#122 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#122 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#122 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#121 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#120 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#120 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#120 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#723 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#723 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#723 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#723 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#723 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#723 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#723 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#722 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#724 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#724 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#724 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#724 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#724 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#723 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#725 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#725 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#725 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#725 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#724 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#406 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#400 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#400 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#400 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#726 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#726 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#726 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#725 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#727 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#727 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#727 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#726 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#407 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#407 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#407 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#403 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#401 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#728 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#728 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#728 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#728 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#728 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#727 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#408 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#408 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#402 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#729 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#729 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#729 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#729 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#729 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#728 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#409 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#409 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#409 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#409 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#405 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#403 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#730 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#730 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#730 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#730 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#730 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#729 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#729 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#729 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#410 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#410 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#410 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#406 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#404 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#731 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#731 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#731 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#731 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#731 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#731 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#730 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#730 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#730 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#732 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#732 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#732 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#411 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#411 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#411 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#732 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#732 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#731 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#411 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#407 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#405 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#733 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#733 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#733 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#733 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#732 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#412 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#412 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#408 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#734 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#734 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#734 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#734 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#734 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#733 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#733 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#733 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#413 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#413 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#413 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#409 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#407 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#407 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#735 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#735 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#414 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#414 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#410 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#408 
irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#123 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#123 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#122 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#121 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#736 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#736 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#736 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#735 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#737 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#737 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#737 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#736 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#415 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#415 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#415 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#415 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#738 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#738 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#411 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#409 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#738 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#738 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#738 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#124 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#123 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#123 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#123 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#122 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#739 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#739 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#739 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#739 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#739 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#738 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#739 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#739 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#416 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#412 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#410 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#125 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#124 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#123 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#741 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#741 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#741 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#740 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#125 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#124 irq_context: 0 
(wq_completion)nfc5_nci_rx_wq#31 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#31 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#417 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#417 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#411 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#743 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#743 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#742 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#127 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#127 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#414 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#126 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#412 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#32 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#744 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#744 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#744 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#744 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#744 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#415 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#413 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#743 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#128 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#128 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#127 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#126 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#745 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#745 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#745 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#745 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#745 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#744 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#34 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#34 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#33 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#33 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#746 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#746 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#745 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#745 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#420 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#420 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#420 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#416 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#414 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#747 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#746 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#421 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#421 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#417 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#415 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#748 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#748 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#748 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#748 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#748 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#747 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#747 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#747 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex deferred_probe_mutex &rq->__lock irq_context: 0 &dev->mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &obj_hash[i].lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#749 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#749 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#749 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#749 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#749 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#749 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#748 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#750 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#750 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#750 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#749 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#422 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#422 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#418 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#416 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#416 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#416 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#751 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#751 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#750 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#423 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#423 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#419 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#419 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#419 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#417 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#752 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#752 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#752 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#752 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#752 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#752 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#752 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#751 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#424 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#424 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#420 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#418 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#753 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#753 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#753 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#752 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#425 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#425 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#425 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#421 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#421 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#419 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#754 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#754 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#754 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#754 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#754 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#753 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#426 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#426 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#422 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#420 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#755 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#755 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#755 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#755 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#755 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#754 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#427 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#427 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#129 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#129 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#128 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#127 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#127 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#427 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#756 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#756 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#423 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#423 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#423 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#421 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#756 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#755 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#755 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#755 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#757 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#757 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#757 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#756 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#428 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#424 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#422 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#758 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#758 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#758 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#758 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#758 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#758 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#758 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#757 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#757 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#757 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#429 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#425 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#425 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#423 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#423 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#423 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#130 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#130 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#129 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#129 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#128 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#759 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#759 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#759 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#430 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#430 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#430 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#424 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#424 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#424 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#131 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#131 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#760 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#760 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#130 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#129 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#760 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#759 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#431 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#431 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#431 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#431 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#427 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#425 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#425 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#761 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#760 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#432 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#432 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#428 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#426 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#426 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#426 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#762 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#762 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#762 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#762 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#762 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#132 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#132 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#131 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#761 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#130 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#763 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#763 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#762 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#433 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#433 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#433 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#429 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#427 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#427 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#764 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#764 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#764 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#764 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#763 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#430 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#428 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#428 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#428 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#765 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#765 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#765 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#765 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#765 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#764 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#435 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#766 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#766 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#766 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#765 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#431 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#429 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#767 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#766 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#766 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#766 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#768 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#768 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#437 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#768 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#437 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#768 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#433 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#767 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#433 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#767 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#767 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &pcp->lock &zone->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#35 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#35 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#34 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#34 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#34 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#34 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#34 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#133 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#133 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#132 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#132 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#132 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#131 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#769 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#769 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#769 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#769 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#769 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#769 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#769 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#768 irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci5#2 
(work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#770 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#770 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#770 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#438 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#438 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#434 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#432 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#432 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#771 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#771 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#771 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#771 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#771 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#770 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#439 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#439 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#435 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#433 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#433 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#772 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#772 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#440 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#440 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#771 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#436 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#134 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#134 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#133 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#133 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#133 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#132 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#773 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#773 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#773 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#773 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#773 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#772 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#772 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#772 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#774 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#774 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#774 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#773 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#773 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#773 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#441 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#441 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#437 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#437 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#435 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#135 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#135 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#134 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#133 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#775 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#775 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#442 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#442 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#442 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#442 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#776 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#776 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#438 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#436 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#436 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#436 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#776 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#775 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#775 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#775 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#777 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#777 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#777 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#777 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#777 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#777 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#776 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#778 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#778 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#778 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#778 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#777 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#443 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#443 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#439 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#439 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#439 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#437 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#437 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#136 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#136 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#135 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#134 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#134 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#134 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#778 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#444 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#444 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#444 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#444 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#440 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#440 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#440 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#438 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#438 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#438 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#780 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#780 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#780 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#779 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#781 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#781 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#781 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#441 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#781 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#780 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#780 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#780 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#439 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#439 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#439 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#782 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#781 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#781 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#446 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#446 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#442 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#440 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#783 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#783 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#782 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#784 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#784 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#783 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#785 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#785 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#784 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#786 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#786 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#786 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#786 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#786 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#785 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#447 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#443 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#441 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#787 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#787 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#787 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#787 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#786 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#788 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#788 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#788 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#788 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#789 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#789 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#789 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#788 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#448 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#444 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#442 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#790 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#790 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#449 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#449 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#789 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#445 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#443 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#446 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#446 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#446 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#790 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#444 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#792 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#792 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#792 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#792 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#791 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#793 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#792 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#451 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#451 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#451 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#447 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#445 irq_context: 0 &dev->mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#137 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#137 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#137 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#137 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#136 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#135 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#794 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#794 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#794 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#794 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#794 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#793 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#452 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#448 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#446 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#796 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#796 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#796 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#796 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#796 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#795 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#453 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#453 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#447 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#447 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#797 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#797 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#454 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#796 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#450 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#450 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#450 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#448 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#448 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#448 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#138 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#138 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#137 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#136 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#455 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#455 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#455 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#455 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#451 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#799 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#799 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#799 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#798 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#798 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#798 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#139 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#138 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#137 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#137 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#137 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#800 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#800 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#800 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#799 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#800 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#452 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#452 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#452 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#450 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#450 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#802 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#802 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#802 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#801 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#457 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#457 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#457 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#453 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#451 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#451 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#451 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#140 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#140 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#140 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#140 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#139 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#138 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#803 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#803 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#803 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#802 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#458 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#458 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#454 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#452 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#141 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#141 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#140 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#140 &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#140 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#139 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#804 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#804 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#804 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#804 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#804 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#803 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#805 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#805 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#805 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#805 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#805 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#804 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#2 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 (wq_completion)hci5 
(work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#2 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock 
&hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#807 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#807 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#807 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#807 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#142 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#807 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#142 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#806 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#806 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#806 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#142 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#141 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#459 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#459 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#140 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#455 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#455 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#455 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#808 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#808 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#808 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#808 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#808 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#808 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#808 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#807 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#143 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#143 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#809 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#142 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#142 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#460 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#456 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#141 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#808 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#35 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#35 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#35 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#810 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#810 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#810 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#810 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#810 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#809 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#461 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#457 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#811 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#811 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#811 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#455 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#810 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#144 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#143 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#143 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#143 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#142 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#812 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#812 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#812 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#811 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#811 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#811 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#152 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#813 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#813 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#813 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#813 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#813 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#812 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#462 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#458 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#456 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#459 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#457 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#814 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#814 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#814 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#813 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#460 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#460 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#460 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#458 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#815 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#815 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#815 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#814 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#145 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#145 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#143 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#816 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#816 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#816 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#815 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#816 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#465 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#465 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#465 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#461 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#459 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#146 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#145 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#144 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#818 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#818 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#818 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#818 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#818 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#819 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#819 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#819 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#818 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#467 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#147 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#147 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#463 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#463 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#463 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#146 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#145 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#461 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#820 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#820 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#820 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#820 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#819 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#819 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#819 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#468 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#464 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#462 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#821 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#821 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#820 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#820 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#469 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#469 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#469 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#465 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#463 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#822 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#822 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#822 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#822 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#822 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#822 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#822 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#148 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#148 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#147 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#146 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#823 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#823 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#823 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#823 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#822 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#822 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#822 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#470 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#470 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#466 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#464 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#824 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#824 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#824 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#824 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#824 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#823 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#471 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#471 &rq->__lock irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#471 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#467 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#467 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#465 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#465 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#825 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#825 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#825 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#825 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#825 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#825 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#825 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#149 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#149 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#824 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#148 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#147 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#825 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#825 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#825 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#472 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#466 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#827 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#827 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#827 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#826 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#467 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#828 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#828 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#828 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#828 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#828 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#470 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#150 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#150 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#149 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#148 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#468 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#830 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#830 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#830 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#830 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#830 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#829 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#475 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#831 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#831 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#831 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#831 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#831 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#831 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#475 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#830 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#471 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#469 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#151 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#151 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#150 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#149 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#832 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#832 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#832 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#832 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#831 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#476 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#472 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#470 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#833 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#833 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#833 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#832 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#477 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#477 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#477 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#477 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#473 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#471 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#834 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#834 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#834 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#834 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#834 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#833 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#833 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#833 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#478 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#478 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#835 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#835 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#835 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#835 irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#4 
fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#834 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#479 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#479 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#479 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#479 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#475 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#473 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#836 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#836 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#480 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#480 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#836 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#836 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#835 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#476 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#474 irq_context: 0 sb_writers#4 oom_adj_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#837 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#837 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#481 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#481 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#481 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#477 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#477 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#475 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#836 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#838 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#838 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#838 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#838 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#838 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#837 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#837 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#837 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#839 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#839 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#839 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#839 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#839 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#838 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#838 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#838 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#152 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#152 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#151 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#151 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#150 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#482 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#482 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#478 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#476 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#840 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#840 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#840 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#840 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#839 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#483 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#479 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#479 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#479 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#153 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#152 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#151 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#151 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#841 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#841 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#841 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#840 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#484 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#478 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#478 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#842 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#842 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#842 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#842 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#841 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#485 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#485 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#481 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#479 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#843 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#842 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#482 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#480 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#155 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#155 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#154 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#153 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#844 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#844 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#844 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#843 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#487 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#487 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#487 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#487 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#483 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#481 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#481 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#488 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#484 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#484 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#482 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#846 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#846 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#846 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#845 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#489 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#489 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#489 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#847 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#847 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#846 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#846 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#485 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#483 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#156 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#156 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#156 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#155 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#154 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#154 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#848 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#848 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#848 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#848 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#847 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#847 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#847 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#490 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#486 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#486 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#486 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#849 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#849 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#849 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#848 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#491 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#491 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#491 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#491 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#487 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#485 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#850 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#850 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#850 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#850 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#849 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#492 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#492 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#488 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#145 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#493 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#493 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#489 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#487 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#852 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#852 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#852 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#852 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#852 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#494 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#494 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#490 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#488 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#853 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#853 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#853 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#852 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#852 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#495 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#495 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#495 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#495 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#853 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#853 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#853 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#492 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#490 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#490 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#855 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#855 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#855 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#855 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#855 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#854 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#856 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#497 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#856 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#493 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#491 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#856 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#855 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#157 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#157 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#156 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#155 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#155 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#155 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#857 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#857 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#857 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#857 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#857 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#498 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#498 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#494 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#492 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#492 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#492 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#856 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#858 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#858 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#858 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#858 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#858 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#858 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#858 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#499 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#499 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#499 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#857 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#493 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#859 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#859 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#859 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#859 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#859 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#859 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#858 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#500 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#500 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#500 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#500 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#496 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#494 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#860 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#860 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#860 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#860 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#859 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#859 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#859 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#497 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#495 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#861 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#861 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#502 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#502 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#502 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#502 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#498 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#498 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#861 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#860 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#496 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#158 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#158 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#158 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#158 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#157 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#156 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#862 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#862 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#861 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#863 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#863 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#863 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#862 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#503 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#503 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#503 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#499 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#497 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#864 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#864 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#864 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#863 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#863 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#863 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#504 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#500 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#498 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#865 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#865 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#865 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#864 irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#867 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#867 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#867 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#866 irq_context: 0 &p->lock &of->mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#868 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#868 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#868 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#867 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#159 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#158 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#157 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#869 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#869 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#869 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#869 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#869 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#868 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#505 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#505 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#501 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#499 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#499 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#870 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#870 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#870 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#869 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#871 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#871 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#870 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#870 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#870 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#506 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#502 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#500 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#500 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#500 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#872 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#872 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#872 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#507 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#507 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#507 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#873 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#873 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#872 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#874 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#874 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#874 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#874 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#873 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#873 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#873 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#508 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#508 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#504 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#502 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#875 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#875 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#875 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#874 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#874 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#874 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#509 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#509 &rq->__lock irq_context: 
0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#509 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#509 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#505 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#503 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#160 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#159 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#876 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#876 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#876 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#875 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#506 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#504 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#161 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#161 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#160 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#159 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#159 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#159 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#877 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#877 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#877 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#877 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#877 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#876 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#876 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#876 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#878 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#878 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#878 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#878 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#878 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#878 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#878 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#511 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#511 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#507 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#505 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#879 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#879 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#879 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#512 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#512 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#508 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#506 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#880 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#880 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#879 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#513 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#509 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#507 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#881 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#881 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#881 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#881 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#881 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#880 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#880 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#880 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#508 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#882 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#882 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#515 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#511 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#509 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#162 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#162 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#161 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#161 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#161 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#160 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#883 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#883 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#883 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#883 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#883 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#882 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#882 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#884 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#884 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#883 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#883 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#883 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#516 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#512 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#510 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#510 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#510 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#885 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#885 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#884 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#517 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#517 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#517 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#517 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#513 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#511 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#511 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#511 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#886 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#886 irq_context: 0 misc_mtx fs_reclaim &cfs_rq->removed.lock irq_context: 0 misc_mtx fs_reclaim &obj_hash[i].lock irq_context: 0 misc_mtx fs_reclaim pool_lock#2 irq_context: 0 misc_mtx fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#885 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#163 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#161 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#887 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#887 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#886 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#886 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#886 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#888 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#887 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#887 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#887 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#518 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#518 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#514 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#512 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#889 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#889 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#889 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#888 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#519 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#519 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#513 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#164 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#164 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#163 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#162 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#890 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#890 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#889 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#891 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#891 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#891 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#891 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#891 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#892 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#892 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#891 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#520 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#520 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#516 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#516 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#516 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#514 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#893 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#893 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#893 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#893 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#893 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#521 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#892 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#517 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#515 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#894 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#894 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#894 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#893 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#895 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#895 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#894 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#518 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#518 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#518 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#516 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#165 irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#164 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#164 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#164 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#523 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#523 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#519 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#163 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#517 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#896 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#896 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#895 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#897 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#897 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#896 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#524 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#524 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#520 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#518 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#525 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#525 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#525 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#521 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#519 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#898 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#898 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#898 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#166 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#165 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#897 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#899 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#899 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#899 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#899 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#898 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#522 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#520 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#520 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#520 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#520 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#900 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#527 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#527 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#899 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#523 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#521 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#521 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#521 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#167 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#167 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#166 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#165 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#901 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#901 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#901 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#901 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#900 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#524 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#522 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#902 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#902 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#902 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#902 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#902 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#902 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#902 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#529 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#525 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#523 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#903 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#903 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#903 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#903 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#902 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#904 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#903 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#905 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#905 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#905 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#904 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#526 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#524 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#906 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#906 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#905 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#531 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#531 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#527 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#525 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#907 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#906 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#906 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#906 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#908 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#908 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#908 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#907 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#909 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#909 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#909 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#909 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#909 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#909 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#909 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#532 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#532 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#532 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#532 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#908 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#528 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#526 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#526 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#526 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#168 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#167 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#166 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#910 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#910 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#910 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#910 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#910 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#909 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#909 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#909 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#911 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#911 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#911 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#910 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#912 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#912 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#912 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#911 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#911 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#911 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#527 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#527 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#527 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#169 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#168 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#168 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#168 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#167 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#167 &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_tx_wq#167 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#913 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#913 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#913 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#912 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#912 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#912 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#914 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#914 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#914 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#914 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#913 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#913 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#913 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#915 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#915 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#914 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#914 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#534 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#534 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#530 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#528 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#170 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#169 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#168 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#916 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#916 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#535 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#916 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#531 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#531 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#529 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#915 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#529 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#529 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#917 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#916 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#532 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#532 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#532 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#530 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#530 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#530 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#918 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#918 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#917 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#917 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#917 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#919 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#919 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#919 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#918 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#537 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#533 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#531 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#531 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#920 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#919 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#919 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#919 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#921 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#921 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#921 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#921 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#921 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#921 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#921 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#532 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#920 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#922 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#922 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#922 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#922 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#922 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#923 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#923 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#923 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#922 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#924 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#924 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#924 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#923 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#923 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#539 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#535 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#533 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#171 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#171 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#171 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#170 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#169 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#540 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#540 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#540 irq_context: 
0 (wq_completion)nfc3_nci_rx_wq#536 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#165 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#534 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#926 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#926 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#926 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#926 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#925 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#925 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#925 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#927 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#541 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#537 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#535 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#928 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#928 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#928 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#928 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#928 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#542 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#542 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#538 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#536 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#929 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#929 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#929 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#928 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#928 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#930 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#931 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#931 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#543 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#931 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#543 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#543 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#539 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#537 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#537 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#930 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#932 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#932 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#932 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#932 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#932 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#931 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#933 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#933 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#933 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#933 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#932 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#932 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#932 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#934 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#934 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#934 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#934 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#933 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#933 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#933 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#5 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_tx_wq#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc28_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#544 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#544 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#540 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#540 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#538 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#935 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#935 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#934 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#934 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#934 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#172 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#172 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#171 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#170 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#37 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#37 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#36 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#36 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#173 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#172 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#171 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#6 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#8 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#5 irq_context: 0 
(wq_completion)nfc27_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#936 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#936 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#936 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#935 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#935 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#541 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#541 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#38 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#38 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#37 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#37 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#174 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#174 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#173 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#172 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#9 irq_context: 0 
(wq_completion)nfc17_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc41_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc41_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc41_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc35_nci_rx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#9 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#39 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#39 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#38 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#38 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#175 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#175 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#174 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#173 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#173 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#173 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#546 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#546 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#542 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#540 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#937 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#937 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#937 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#936 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#938 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#938 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#938 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#938 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#176 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#176 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#175 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#547 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#939 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#939 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#939 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#939 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#543 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#541 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#939 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#938 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#938 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#938 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#177 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#177 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#176 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#940 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#940 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#940 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#940 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#940 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#940 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#940 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#939 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#939 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#939 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#178 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#177 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#176 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#548 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#548 irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#941 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#941 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#941 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#940 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#940 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#940 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#942 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#942 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#942 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#942 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#942 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#942 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#941 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#549 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#545 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#543 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#943 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#943 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#943 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#943 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#943 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#943 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#943 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#942 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#942 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#942 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#550 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#550 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#546 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#544 irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#178 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#177 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#944 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#944 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#943 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#551 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#547 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#545 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#945 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#945 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#945 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#945 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#945 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#945 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#945 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#944 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#944 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#944 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#552 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#552 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#552 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#552 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#548 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#546 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#553 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#549 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#554 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#554 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#554 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#550 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#946 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#946 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#548 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#946 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#945 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#179 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#178 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#40 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#39 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#39 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#947 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#947 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#946 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#551 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#549 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#948 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#948 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#948 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#947 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#552 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#552 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#550 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#949 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#949 irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#948 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#948 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#948 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#181 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#181 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#180 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#179 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#557 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#553 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#551 irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#950 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#950 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#950 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#950 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#950 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#949 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#951 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#951 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#950 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#558 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#558 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#554 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#552 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#952 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#952 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#952 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#952 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#952 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#951 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#559 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#555 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#555 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#553 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#182 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#182 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#953 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#953 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#953 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#953 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#953 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#952 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#182 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#560 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#560 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#560 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#181 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#180 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#560 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#556 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#554 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#954 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#954 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#955 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#955 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#955 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#955 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#954 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#561 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#561 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#183 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#557 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#555 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#182 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#182 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#182 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#181 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#181 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#181 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#956 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#956 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#956 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#956 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#956 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#956 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#955 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#957 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#956 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#956 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#562 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#562 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#562 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#562 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#558 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#558 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#558 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#556 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#563 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#563 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#559 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#958 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#958 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#958 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#958 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#957 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#557 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#184 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#184 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#184 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#184 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#183 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#183 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#183 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#182 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#182 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#182 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#564 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#558 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#558 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#558 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#959 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#959 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#959 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#185 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#184 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#184 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#184 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#183 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#183 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#183 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#40 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#40 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#960 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#960 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#40 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#40 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#960 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#959 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#960 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#960 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#960 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#565 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#565 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#565 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#186 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#186 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#186 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#565 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#561 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#561 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#561 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#559 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#185 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#185 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#185 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#962 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#962 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#962 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#962 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#961 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#963 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#963 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#963 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#963 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#962 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#962 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#962 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#562 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#560 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#964 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#187 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#187 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#964 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#964 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#186 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#42 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#42 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#963 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#185 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#185 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#185 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#567 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#563 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#561 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#41 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#965 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#965 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#964 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &cfs_rq->removed.lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#568 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#564 irq_context: 0 &dev->mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &dev->mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#562 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#966 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#966 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#965 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#565 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#565 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#565 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#563 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#967 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#967 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#967 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#966 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#188 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#968 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#968 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#968 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#968 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#968 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#968 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#968 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#967 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#187 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#187 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#187 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#570 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#570 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#566 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#564 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#969 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#969 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#969 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#969 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#968 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#571 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#571 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#567 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#565 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#970 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#970 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#970 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#970 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#970 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#969 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#971 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#566 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#970 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#566 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#566 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#189 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#189 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#189 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#188 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#972 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#972 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#971 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#973 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#973 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#972 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#974 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#974 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#974 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#974 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#974 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#973 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#973 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#973 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#43 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#42 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#42 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#42 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#42 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#573 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#569 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#567 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#567 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#567 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#190 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#190 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#189 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#188 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#975 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#975 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#975 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#975 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#975 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#975 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#975 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#975 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#975 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#974 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#43 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#43 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#43 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#43 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#976 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#976 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#976 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#976 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#975 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#574 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#574 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#574 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#570 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#570 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#568 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#568 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#977 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#976 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#976 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#976 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#575 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#575 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#571 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#978 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#978 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#978 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#978 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#978 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#978 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#977 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#977 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#977 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#576 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#572 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#572 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#572 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#979 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#979 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#978 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#570 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#191 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#191 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#191 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#191 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#190 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#189 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#189 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#980 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#980 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#980 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#979 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#979 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#979 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#577 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#577 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#577 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#571 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#981 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#981 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#980 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#980 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#980 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#578 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#578 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#574 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#572 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#572 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#572 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#982 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#982 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#982 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#981 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#579 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#579 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#575 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#573 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#573 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#983 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#983 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#982 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#580 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#580 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#576 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#192 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#192 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#192 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#192 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#191 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#190 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#190 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#190 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &u->iolock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &u->iolock &mm->mmap_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#984 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#984 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#984 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#984 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#983 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#581 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#581 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#985 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#985 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#985 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#984 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#193 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#193 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#192 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#191 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#191 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#191 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#582 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#582 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#578 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#576 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#986 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#985 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#44 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#44 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#44 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#44 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#987 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#986 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#986 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#986 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#988 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#988 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#988 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#987 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#579 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#579 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#577 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#194 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#193 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#192 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#192 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#989 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#988 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#988 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#988 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#584 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#580 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#578 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#195 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#194 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#193 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#46 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#46 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#45 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#45 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#45 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#45 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#990 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#990 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#990 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#990 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#990 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#989 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#585 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#581 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#579 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#579 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#991 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#991 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#991 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#991 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#991 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#991 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#991 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#990 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#990 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#992 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#992 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#992 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#992 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#991 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#586 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#586 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#586 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#586 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#586 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#586 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#582 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#993 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#993 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#993 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#580 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#993 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#992 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#196 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#195 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#194 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#994 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#994 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#995 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#994 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#587 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#587 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#581 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#996 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#996 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#996 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#588 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#588 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#197 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#588 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#588 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#197 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#197 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#197 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#995 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#196 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#995 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#995 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#195 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#997 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#997 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#997 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#997 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#996 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#998 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#998 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#998 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#997 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#198 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#198 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#196 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#589 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#585 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#583 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#583 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#998 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#590 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#586 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#584 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#584 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#591 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#587 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1000 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1000 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#585 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1000 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#999 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1001 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1001 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1000 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#592 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#588 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#586 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1002 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1002 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1002 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1002 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1002 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1002 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1002 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1001 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#593 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#589 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#589 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#589 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#587 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#587 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#587 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1003 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1002 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#594 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#590 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1004 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1004 irq_context: 0 (wq_completion)wg-crypt-wg0#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1003 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1003 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#199 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#198 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#197 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1004 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1006 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1005 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1005 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#595 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#591 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#591 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1007 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1007 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1007 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1007 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1006 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#200 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#200 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#592 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#592 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#592 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#199 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#199 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#199 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#590 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#198 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1008 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1008 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1008 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1007 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1007 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1007 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#597 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#597 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#597 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#593 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#593 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#593 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#591 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1009 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1009 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1009 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1009 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1009 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1009 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1009 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1008 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#598 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#598 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#594 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#592 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#599 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#599 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#599 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1010 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1010 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#599 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#595 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1009 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1009 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1009 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#593 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#201 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#201 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#201 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#201 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#199 irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#47 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1010 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1012 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1012 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1012 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1011 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1011 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#600 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#596 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#594 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1012 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1014 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1014 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1014 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1014 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1013 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1013 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#601 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#597 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#595 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#595 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#595 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1015 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1015 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1014 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1014 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1014 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1016 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1016 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1016 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1015 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1015 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1015 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1016 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1018 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1018 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1017 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1017 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#602 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#602 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#598 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#596 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ 
do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#161 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1019 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1019 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1019 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1019 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1018 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#603 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#603 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#599 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#597 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1020 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#313 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#313 
(work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1020 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1020 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1020 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1020 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1020 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1020 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#604 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#604 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#600 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#600 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#600 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1020 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1020 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#605 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#605 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#601 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#599 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#599 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1021 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#602 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#203 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#203 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#203 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#203 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#202 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#600 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#600 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#600 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#603 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#601 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#601 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#601 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#204 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#202 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#202 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#202 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#608 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#608 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#608 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#608 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#604 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1023 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1022 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1024 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1023 irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#318 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#307 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#310 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1025 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1025 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1025 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1024 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#609 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#609 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#609 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#605 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#603 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#610 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#610 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#606 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#606 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#606 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#604 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1027 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#205 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#205 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#204 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#203 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1028 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1028 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1027 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1029 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1029 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1029 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1029 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#611 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1029 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#611 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#607 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#605 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1028 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1028 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1028 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#308 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1030 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1030 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1030 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1030 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1030 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1029 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#206 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#206 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#612 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#608 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#205 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#205 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#205 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#606 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#204 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#204 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#204 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#48 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#47 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1031 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1031 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1031 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1030 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1030 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1032 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1031 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#609 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#607 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#49 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#49 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#49 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#49 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#48 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#48 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#48 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#48 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#206 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#205 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1033 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1033 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1033 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1033 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1033 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1032 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#614 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1032 &rq->__lock irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#614 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1032 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#610 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#608 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#208 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#208 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#206 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1034 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1033 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#611 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#208 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#208 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#208 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#609 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#207 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#50 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#49 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#49 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1035 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1035 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1035 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1034 irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#616 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#612 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#610 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#610 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#210 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#209 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#209 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#208 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1036 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1036 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1035 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#617 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#617 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#613 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#611 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#211 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#211 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#210 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#209 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#209 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_rx_wq#11 
irq_context: 0 (wq_completion)nfc7_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#51 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#50 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#50 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1037 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1037 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1037 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1036 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#618 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#618 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1038 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1038 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1037 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1037 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1037 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#614 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#612 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#612 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#612 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#212 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#212 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#211 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#210 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1039 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1039 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1039 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1039 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1039 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1039 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1039 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1040 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1040 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1039 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1041 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1041 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1041 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1041 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1041 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1040 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &cfs_rq->removed.lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#619 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#615 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#613 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#213 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#213 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#212 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#211 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#620 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#620 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1041 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#52 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#52 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#51 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#51 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1043 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1043 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#621 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#621 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1042 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#617 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1044 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1044 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1043 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#622 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#622 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#618 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#616 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#214 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#214 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#213 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#212 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1045 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1045 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1045 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1045 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1044 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#623 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#623 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#623 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#623 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#619 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#619 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1046 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1046 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#617 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1046 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1045 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1045 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1045 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_rx_wq#13 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#8 irq_context: 0 (wq_completion)wg-crypt-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#8 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#214 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#213 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1047 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1047 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1046 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#624 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#624 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#620 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#618 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#53 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#53 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#52 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#52 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#15 
irq_context: 0 (wq_completion)nfc6_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc23_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#6 irq_context: 0 
(wq_completion)nfc24_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1048 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1048 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1048 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1048 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1048 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1047 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#625 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#625 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#621 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1049 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1049 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1049 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1049 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1048 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1048 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1048 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#626 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#626 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#622 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1050 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1050 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1049 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#627 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1051 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#627 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1051 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1051 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#621 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#621 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#621 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1050 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#215 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#214 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1052 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1052 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1052 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1051 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1053 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1053 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1053 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1052 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1052 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1052 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#628 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#628 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#624 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#622 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#622 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#622 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1054 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1054 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1054 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1054 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1054 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1053 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1053 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1053 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#625 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#623 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1055 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1054 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#630 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#630 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#630 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#630 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#624 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#624 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1056 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1056 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1056 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#631 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#631 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#631 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1057 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1057 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1057 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1057 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#625 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1057 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1057 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1057 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1056 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#632 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#632 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#628 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1058 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1058 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1058 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1058 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1058 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1059 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1059 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1059 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1059 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1059 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1058 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1058 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1058 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#633 &rq->__lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#633 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#633 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#633 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#633 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1060 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1060 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1060 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1059 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#629 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#629 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#629 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#627 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#627 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#627 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#216 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#215 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1060 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#634 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#634 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#630 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#630 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#630 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#628 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#218 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#218 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#218 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#218 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#217 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#216 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1062 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1062 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1062 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1061 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#53 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#53 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#55 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#55 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#55 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#54 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#54 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#54 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#54 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#10 irq_context: 0 
(wq_completion)nfc9_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &base->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx (wq_completion)nfc20_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_tx_wq#10 irq_context: 0 misc_mtx (wq_completion)nfc20_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#7 irq_context: 0 
(wq_completion)nfc19_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#8 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#8 irq_context: 0 
(wq_completion)nfc13_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#56 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#56 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#55 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#55 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#55 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#55 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#13 irq_context: 0 
(wq_completion)nfc7_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1062 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#219 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#219 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#218 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#217 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#635 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#635 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#631 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#629 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#630 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#630 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1064 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1064 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1064 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1064 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1063 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#220 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#220 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#219 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#218 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#218 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#218 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#221 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#220 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#219 irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#19 irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#19 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#20 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#221 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#220 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1065 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1065 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1065 irq_context: 
0 (wq_completion)nfc2_nci_tx_wq#1064 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1064 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1064 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#56 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#56 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#637 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#633 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#631 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#631 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#631 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#222 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#221 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1066 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1066 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1066 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1066 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1066 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1065 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#638 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#638 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#638 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#634 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#632 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1067 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1067 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1066 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1068 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#639 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#639 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#639 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#635 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#635 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#635 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1069 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1069 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1069 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1069 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1069 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1068 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1070 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1069 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1069 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1069 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#640 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#640 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#636 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#634 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#634 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#635 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#225 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#225 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#225 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#225 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#223 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#223 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#223 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#58 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#57 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#57 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1070 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#642 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#638 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1072 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1072 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#636 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#226 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#226 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#226 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#224 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#223 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1073 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1073 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1073 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1073 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#643 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#643 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#643 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#643 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#639 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#639 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#639 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#637 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1074 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1074 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1074 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1073 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1075 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1075 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1075 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1075 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1075 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1074 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#638 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#227 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#227 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1076 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#225 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1076 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1076 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#225 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#225 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#224 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1076 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1076 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1075 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1077 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1077 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1077 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1077 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1076 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1076 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1076 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#641 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#639 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1078 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1078 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1077 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1079 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1079 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1078 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1078 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#646 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#642 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#228 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#228 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#226 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#226 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#225 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#225 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#225 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1080 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1080 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1079 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#647 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#647 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#643 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#641 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1081 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1081 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1081 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1081 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#648 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1081 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#648 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#648 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1080 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#648 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#644 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#642 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1082 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1082 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1082 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1082 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1081 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#649 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#649 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#649 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#649 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#645 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#643 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1083 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1082 irq_context: 0 &hdev->req_lock (wq_completion)hci2#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1084 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1084 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1084 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#650 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#646 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#646 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1084 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#644 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1084 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1083 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#229 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#229 irq_context: 0 (wq_completion)hci3#3 irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#227 irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#226 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 
(work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &n->list_lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) 
rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 
(work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#3 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 
(wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci3#4 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci3#4 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1085 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1085 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1085 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1086 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1086 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1086 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1085 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1088 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#230 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#230 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#230 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1086 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#651 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#647 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#645 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#230 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#228 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#652 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#648 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#646 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#646 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#646 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg2#152 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#13 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#307 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#17 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#24 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#24 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#24 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#24 irq_context: 0 
(wq_completion)nfc6_nci_cmd_wq#24 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#24 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#60 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#60 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#60 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#60 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#59 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#59 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#230 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#229 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#665 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#665 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#661 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#659 irq_context: softirq slock-AF_INET#2 &c->lock irq_context: softirq slock-AF_INET#2 &____s->seqcount irq_context: softirq slock-AF_INET#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg1#155 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1115 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1115 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1115 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1113 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1116 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1116 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1114 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1114 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1114 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1117 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1117 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1118 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1116 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1116 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg0#161 irq_context: 0 (wq_completion)wg-kex-wg0#319 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 
(wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#319 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#319 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#311 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#320 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock 
irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ 
do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 
irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#156 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#312 irq_context: 0 (wq_completion)wg-kex-wg1#312 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1090 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1090 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1090 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1090 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1090 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1088 irq_context: 0 (wq_completion)wg-kex-wg2#308 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1119 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1119 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 
(wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#653 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#649 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#647 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1091 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1091 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1089 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1089 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1089 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 
batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#308 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#320 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#309 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#311 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#317 irq_context: 0 (wq_completion)wg-kex-wg1#312 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#312 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 &hdev->req_lock (wq_completion)hci0#4 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1119 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#309 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#153 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)bond0#171 irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#10 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#171 irq_context: 0 rtnl_mutex team->team_lock_key#171 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#171 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#171 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#171 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#171 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); 
({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#171 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#171 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#171 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#171 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 lock irq_context: 0 rtnl_mutex team->team_lock_key#171 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#171 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#171 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#171 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#171 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#171 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#171 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#171 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#171 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#171 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#171 console_lock 
console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 rtnl_mutex team->team_lock_key#171 lock kernfs_idr_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#171 net_rwsem &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#171 net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#11 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 
(work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1117 irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#156 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#318 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1120 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1118 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#170 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#170 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#159 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1121 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1121 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1119 irq_context: 0 wq_pool_mutex &wq->mutex rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1122 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1122 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1122 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1122 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1122 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1120 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1120 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1120 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1120 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#171 &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci0#5 
(work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1092 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1092 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1092 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 
(wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1123 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1123 irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1090 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1121 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#654 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#650 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#648 irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc4_nci_cmd_wq#231 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#231 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#229 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#228 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#59 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#59 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#58 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#58 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1093 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1093 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1091 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#11 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#11 irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &____s->seqcount#2 irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#11 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1094 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1094 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1094 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1094 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1094 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1092 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1092 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1092 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#9 irq_context: 0 (wq_completion)bond0#171 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 
(wq_completion)nfc22_nci_rx_wq#9 irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)nfc22_nci_tx_wq#9 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc23_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#9 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#6 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->cmd_sync_work) 
&hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#5 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci0#6 (work_completion)(&hdev->tx_work) &list->lock#6 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci0#6 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#663 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#663 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#663 &cfs_rq->removed.lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#663 &obj_hash[i].lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#667 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#667 
&rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#12 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#667 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#171 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1124 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &base->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1125 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1125 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1125 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#233 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#233 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1123 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#668 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#668 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#668 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#668 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#664 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#661 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#669 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#669 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#665 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 
(work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &n->list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#5 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#9 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#9 irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#61 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#61 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#60 
irq_context: 0 (wq_completion)nfc5_nci_tx_wq#60 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#60 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#60 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#25 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#23 irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg0#162 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)bond0#172 &rq->__lock irq_context: 0 (wq_completion)bond0#172 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#172 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#172 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#11 irq_context: 0 rtnl_mutex team->team_lock_key#172 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#172 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#172 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#172 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#172 &c->lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1095 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1095 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1095 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1095 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1093 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1093 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1093 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#7 irq_context: 0 
(wq_completion)nfc33_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#172 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#172 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#172 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#172 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#172 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#172 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 lock irq_context: 0 rtnl_mutex team->team_lock_key#172 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#172 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#172 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 lweventlist_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#172 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#172 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#172 (console_sem).lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#172 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#172 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#172 console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#7 irq_context: 0 rtnl_mutex team->team_lock_key#172 quarantine_lock irq_context: 0 (wq_completion)nfc32_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#7 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc31_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1096 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1096 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1096 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1096 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1094 irq_context: 0 rtnl_mutex team->team_lock_key#172 remove_cache_srcu irq_context: 0 rtnl_mutex team->team_lock_key#172 remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#172 remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#172 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc27_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc27_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#172 
&____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1127 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1127 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1125 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1125 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#670 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#670 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#670 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#670 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#666 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#663 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#663 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#663 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#234 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#234 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#234 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)nfc4_nci_rx_wq#232 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#231 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#18 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_node_0 irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 
(wq_completion)hci3#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1097 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1097 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1097 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1095 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#655 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#651 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#649 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#649 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1098 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1098 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1098 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1098 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1098 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1098 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1096 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1096 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1096 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#656 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#656 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#656 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#656 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#652 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#650 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1099 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1099 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1099 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1099 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1099 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1097 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#657 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#657 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#657 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#651 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#61 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#61 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#26 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 
&sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 
rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bpf_devs_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 
rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#171 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex nl_table_wait.lock irq_context: 
0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#171 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#171 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#171 &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1100 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1100 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1100 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#171 &devlink_port->type_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#26 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#24 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_rx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#15 irq_context: 0 
(wq_completion)nfc11_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc43_nci_cmd_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc43_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc43_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc13_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#14 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#27 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#27 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc20_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#25 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#671 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#671 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#667 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#664 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc40_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_rx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#10 
irq_context: 0 (wq_completion)nfc29_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#668 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#665 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#665 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#665 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc36_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_rx_wq#5 irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#172 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)nfc35_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#11 irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#14 irq_context: 0 
&ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc35_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc41_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc41_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc41_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1128 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1128 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1126 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1126 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#235 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#235 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc22_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#233 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#232 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc42_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc42_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#63 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#63 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#62 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#6 
(work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_node_0 irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#62 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#26 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#673 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#669 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#666 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#15 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#674 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#674 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#670 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#667 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc6_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#27 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#17 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#64 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#64 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#63 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#63 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#13 irq_context: 0 nfc_devlist_mutex remove_cache_srcu &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#236 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#236 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#234 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#233 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1129 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1129 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1129 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1129 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1127 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1130 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1130 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1130 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1128 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1128 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#675 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#675 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#671 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#668 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#668 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1131 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1131 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1131 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1131 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1129 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#676 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1129 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#676 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#672 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#669 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#237 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#237 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#235 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#235 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#235 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#159 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#234 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#2 
irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#172 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock 
(work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#2 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex 
kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1132 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1132 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex smc_ib_devices.mutex irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &vn->sock_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci1 
(work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1132 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto#3 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1130 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#172 rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq