Lines Matching refs:work_queue

75 			work_queue(struct work_struct *);
329 KASSERT(work_queue(&dw->work) == wq); in destroy_workqueue()
437 KASSERT(work_queue(work) == wq); in linux_workqueue_thread()
490 struct workqueue_struct *const wq = work_queue(&dw->work); in linux_workqueue_timeout()
499 KASSERT(work_queue(&dw->work) == wq); in linux_workqueue_timeout()
579 KASSERT(work_queue(work) == wq); in work_claimed()
605 work_queue(struct work_struct *work) in work_queue() function
641 KASSERT(work_queue(work) == wq); in acquire_work()
659 KASSERT(work_queue(work) == wq); in release_work()
747 if ((wq = work_queue(work)) == NULL) in cancel_work()
751 if (__predict_false(work_queue(work) != wq)) { in cancel_work()
796 if ((wq = work_queue(work)) == NULL) in cancel_work_sync()
800 if (__predict_false(work_queue(work) != wq)) { in cancel_work_sync()
912 KASSERT(work_queue(&dw->work) == wq); in dw_callout_init()
931 KASSERT(work_queue(&dw->work) == wq); in dw_callout_destroy()
954 KASSERT(work_queue(&dw->work) == wq); in cancel_delayed_work_done()
1243 if ((wq = work_queue(&dw->work)) == NULL) in cancel_delayed_work()
1247 if (__predict_false(work_queue(&dw->work) != wq)) { in cancel_delayed_work()
1333 if ((wq = work_queue(&dw->work)) == NULL) in cancel_delayed_work_sync()
1337 if (__predict_false(work_queue(&dw->work) != wq)) { in cancel_delayed_work_sync()
1527 if ((wq = work_queue(work)) == NULL) in flush_work()
1548 if ((wq = work_queue(&dw->work)) == NULL) in flush_delayed_work()
1552 if (__predict_false(work_queue(&dw->work) != wq)) { in flush_delayed_work()
1646 struct workqueue_struct *wq = work_queue(&rw->work); in queue_rcu_work_cb()
1650 KASSERT(work_queue(&rw->work) == wq); in queue_rcu_work_cb()
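
The matches above all follow one idiom in what appears to be the Linux workqueue compatibility code: work_queue() (declared at 75, defined at 605) maps a work_struct back to the workqueue_struct that owns it, and nearly every caller either asserts that mapping while holding the queue's lock (the KASSERTs at 329, 437, 499, 641, 659, 912, 931, 954, 1650) or reads the queue without the lock and re-checks it after taking the lock (the cancel/flush paths at 747/751, 796/800, 1243/1247, 1333/1337, 1527, 1548/1552). Below is a minimal, hedged sketch of that idiom, not the actual NetBSD implementation: the work_owner field, the packed "claimed" bit, and the C11 atomics and pthread mutex used as stand-ins for the kernel primitives are all assumptions made for illustration.

/*
 * Hedged sketch, not the NetBSD source: it assumes struct work_struct
 * packs the owning workqueue_struct pointer and a one-bit "claimed"
 * flag into one word (called work_owner here), and it uses C11 atomics
 * and a pthread mutex as stand-ins for the kernel primitives.
 */
#include <assert.h>
#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

struct workqueue_struct {
	pthread_mutex_t wq_lock;	/* stand-in for the queue's mutex */
	/* ... pending-work list, worker state, ... */
};

struct work_struct {
	_Atomic uintptr_t work_owner;	/* queue pointer | claimed bit */
};

/* Return the workqueue this work item is currently assigned to, if any. */
static struct workqueue_struct *
work_queue(struct work_struct *work)
{
	uintptr_t owner = atomic_load_explicit(&work->work_owner,
	    memory_order_relaxed);

	return (struct workqueue_struct *)(owner & ~(uintptr_t)1);
}

/* True if wq has claimed the work item; mirrors the assertion at 579. */
static bool
work_claimed(struct work_struct *work, struct workqueue_struct *wq)
{
	uintptr_t owner = atomic_load_explicit(&work->work_owner,
	    memory_order_relaxed);

	assert(work_queue(work) == wq);
	return (owner & 1) != 0;
}

/*
 * The cancellation pattern visible around lines 747/751: read the queue
 * without the lock, take that queue's lock, then re-check that the work
 * item still belongs to the same queue before touching it.
 */
static bool
cancel_work_sketch(struct work_struct *work)
{
	struct workqueue_struct *wq;
	bool cancelled = false;

	if ((wq = work_queue(work)) == NULL)
		return false;			/* never queued */

	pthread_mutex_lock(&wq->wq_lock);
	if (work_queue(work) != wq) {
		/* Moved or released while we raced; too late to cancel. */
	} else if (work_claimed(work, wq)) {
		/* ... remove the item from wq's pending list here ... */
		cancelled = true;
	}
	pthread_mutex_unlock(&wq->wq_lock);

	return cancelled;
}

The unlocked read followed by a locked re-check is why the same work_queue() call appears twice in each cancel and flush path above: the first read only decides which lock to take, and the second, under that lock, confirms the work item has not been released or re-queued in the meantime.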