Home
last modified time | relevance | path

Searched refs:workq (Results 1 – 25 of 53) sorted by relevance

Pages: 1 2 3

/linux/drivers/net/ethernet/huawei/hinic3/
hinic3_mgmt.c:146 queue_work(pf_to_mgmt->workq, &mgmt_work->work); in hinic3_init_mgmt_msg_work()
262 pf_to_mgmt->workq = create_singlethread_workqueue(HINIC3_MGMT_WQ_NAME); in hinic3_pf_to_mgmt_init()
263 if (!pf_to_mgmt->workq) { in hinic3_pf_to_mgmt_init()
278 destroy_workqueue(pf_to_mgmt->workq); in hinic3_pf_to_mgmt_init()
292 destroy_workqueue(pf_to_mgmt->workq); in hinic3_pf_to_mgmt_free()
301 flush_workqueue(hwdev->aeqs->workq); in hinic3_flush_mgmt_workq()
304 flush_workqueue(hwdev->pf_to_mgmt->workq); in hinic3_flush_mgmt_workq()
hinic3_eqs.c:304 queue_work(aeqs->workq, &eq->aeq_work); in reschedule_aeq_handler()
334 struct workqueue_struct *workq; in aeq_interrupt() local
343 workq = aeqs->workq; in aeq_interrupt()
346 queue_work(workq, &aeq->aeq_work); in aeq_interrupt()
658 aeqs->workq = alloc_workqueue(HINIC3_EQS_WQ_NAME, WQ_MEM_RECLAIM | WQ_PERCPU, in hinic3_aeqs_init()
660 if (!aeqs->workq) { in hinic3_aeqs_init()
688 destroy_workqueue(aeqs->workq); in hinic3_aeqs_init()
711 destroy_workqueue(aeqs->workq); in hinic3_aeqs_free()
hinic3_hwdev.c:491 queue_delayed_work(hwdev->workq, &hwdev->sync_time_task, in hinic3_auto_sync_time_work()
501 queue_delayed_work(hwdev->workq, &hwdev->sync_time_task, in hinic3_init_ppf_work()
549 hwdev->workq = alloc_workqueue(HINIC3_HWDEV_WQ_NAME, WQ_MEM_RECLAIM | WQ_PERCPU, in hinic3_init_hwdev()
551 if (!hwdev->workq) { in hinic3_init_hwdev()
593 destroy_workqueue(hwdev->workq); in hinic3_init_hwdev()
613 destroy_workqueue(hwdev->workq); in hinic3_free_hwdev()
hinic3_main.c:132 queue_delayed_work(nic_dev->workq, &nic_dev->periodic_work, HZ); in hinic3_periodic_work_handler()
155 nic_dev->workq = create_singlethread_workqueue(HINIC3_NIC_DEV_WQ_NAME); in hinic3_init_nic_dev()
156 if (!nic_dev->workq) { in hinic3_init_nic_dev()
406 destroy_workqueue(nic_dev->workq); in hinic3_free_nic_dev()
463 queue_delayed_work(nic_dev->workq, &nic_dev->periodic_work, HZ); in hinic3_nic_probe()
hinic3_mgmt.h:39 struct workqueue_struct *workq; member
hinic3_hwdev.h:71 struct workqueue_struct *workq; member
hinic3_eqs.h:81 struct workqueue_struct *workq; member
hinic3_nic_dev.h:126 struct workqueue_struct *workq; member
hinic3_mbox.c:400 mbox->workq = create_singlethread_workqueue(HINIC3_MBOX_WQ_NAME); in hinic3_mbox_pre_init()
401 if (!mbox->workq) { in hinic3_mbox_pre_init()
450 destroy_workqueue(mbox->workq); in hinic3_init_mbox()
461 destroy_workqueue(mbox->workq); in hinic3_free_mbox()
hinic3_mbox.h:120 struct workqueue_struct *workq; member
/linux/include/linux/
mISDNhw.h:75 schedule_work(&((s)->workq)); \
81 struct work_struct workq; member
144 struct work_struct workq; member
/linux/drivers/isdn/mISDN/
hwchannel.c:16 struct dchannel *dch = container_of(ws, struct dchannel, workq); in dchannel_bh()
39 struct bchannel *bch = container_of(ws, struct bchannel, workq); in bchannel_bh()
69 INIT_WORK(&ch->workq, dchannel_bh); in mISDN_initdchannel()
92 INIT_WORK(&ch->workq, bchannel_bh); in mISDN_initbchannel()
110 flush_work(&ch->workq); in mISDN_freedchannel()
150 cancel_work_sync(&ch->workq); in mISDN_freebchannel()
l1oip.h:68 struct work_struct workq; member
dsp_core.c:682 schedule_work(&dsp->workq); in dsp_function()
889 schedule_work(&dsp->workq); in dsp_function()
975 cancel_work_sync(&dsp->workq); in dsp_ctrl()
1007 struct dsp *dsp = container_of(work, struct dsp, workq); in dsp_send_bh()
1065 INIT_WORK(&ndsp->workq, (void *)dsp_send_bh); in dspcreate()
stack.c:31 wake_up_interruptible(&st->workq); in _queue_message()
280 wait_event_interruptible(st->workq, (st->status & in mISDNStackd()
378 init_waitqueue_head(&newst->workq); in create_stack()
638 wake_up_interruptible(&st->workq); in delete_stack()
/linux/drivers/hwmon/
xgene-hwmon.c:100 struct work_struct workq; member
436 ctx = container_of(work, struct xgene_hwmon_dev, workq); in xgene_hwmon_evt_work()
516 schedule_work(&ctx->workq); in xgene_hwmon_rx_cb()
586 schedule_work(&ctx->workq); in xgene_hwmon_pcc_rx_cb()
632 INIT_WORK(&ctx->workq, xgene_hwmon_evt_work); in xgene_hwmon_probe()
706 schedule_work(&ctx->workq); in xgene_hwmon_probe()
727 cancel_work_sync(&ctx->workq); in xgene_hwmon_remove()
/linux/drivers/net/ethernet/huawei/hinic/
hinic_hw_mgmt.c:465 queue_work(pf_to_mgmt->workq, &mgmt_work->work); in mgmt_recv_msg_handler()
635 pf_to_mgmt->workq = create_singlethread_workqueue("hinic_mgmt"); in hinic_pf_to_mgmt_init()
636 if (!pf_to_mgmt->workq) { in hinic_pf_to_mgmt_init()
646 destroy_workqueue(pf_to_mgmt->workq); in hinic_pf_to_mgmt_init()
654 destroy_workqueue(pf_to_mgmt->workq); in hinic_pf_to_mgmt_init()
679 destroy_workqueue(pf_to_mgmt->workq); in hinic_pf_to_mgmt_free()
hinic_hw_eqs.c:399 queue_work(aeqs->workq, &aeq_work->work); in aeq_interrupt()
867 aeqs->workq = create_singlethread_workqueue(HINIC_EQS_WQ_NAME); in hinic_aeqs_init()
868 if (!aeqs->workq) in hinic_aeqs_init()
889 destroy_workqueue(aeqs->workq); in hinic_aeqs_init()
904 destroy_workqueue(aeqs->workq); in hinic_aeqs_free()
hinic_dev.h:95 struct workqueue_struct *workq; member
hinic_hw_mbox.c:500 queue_work(func_to_func->workq, &mbox_work->work); in recv_mbox_handler()
614 queue_work(func_to_func->workq, &mbox_work->work); in check_vf_mbox_random_id()
1415 func_to_func->workq = create_singlethread_workqueue(HINIC_MBOX_WQ_NAME); in hinic_func_to_func_init()
1416 if (!func_to_func->workq) { in hinic_func_to_func_init()
1460 destroy_workqueue(func_to_func->workq); in hinic_func_to_func_init()
1479 destroy_workqueue(func_to_func->workq); in hinic_func_to_func_free()
hinic_hw_mgmt.h:144 struct workqueue_struct *workq; member
/linux/drivers/gpu/drm/msm/hdmi/
hdmi.c:71 if (hdmi->workq) in msm_hdmi_destroy()
72 destroy_workqueue(hdmi->workq); in msm_hdmi_destroy()
126 hdmi->workq = alloc_ordered_workqueue("msm_hdmi", 0); in msm_hdmi_init()
127 if (!hdmi->workq) { in msm_hdmi_init()
/linux/drivers/net/ethernet/netronome/nfp/
ccm_mbox.c:140 queue_work(nn->mbox_cmsg.workq, &nn->mbox_cmsg.runq_work); in nfp_ccm_mbox_mark_next_runner()
671 queue_work(nn->mbox_cmsg.workq, in nfp_ccm_mbox_post()
723 drain_workqueue(nn->mbox_cmsg.workq); in nfp_ccm_mbox_clean()
733 nn->mbox_cmsg.workq = alloc_workqueue("nfp-ccm-mbox", WQ_UNBOUND, 0); in nfp_ccm_mbox_alloc()
734 if (!nn->mbox_cmsg.workq) in nfp_ccm_mbox_alloc()
741 destroy_workqueue(nn->mbox_cmsg.workq); in nfp_ccm_mbox_free()
/linux/drivers/crypto/ccree/
cc_request_mgr.c:39 struct workqueue_struct *workq; member
104 destroy_workqueue(req_mgr_h->workq); in cc_req_mgr_fini()
133 req_mgr_h->workq = create_singlethread_workqueue("ccree"); in cc_req_mgr_init()
134 if (!req_mgr_h->workq) { in cc_req_mgr_init()
538 queue_delayed_work(request_mgr_handle->workq, in complete_request()
/linux/drivers/char/xillybus/
xillyusb.c:170 struct workqueue_struct *workq; member
558 if (xdev->workq) in cleanup_dev()
559 destroy_workqueue(xdev->workq); in cleanup_dev()
673 queue_work(ep->xdev->workq, &ep->workitem); in bulk_in_completer()
694 queue_work(ep->xdev->workq, &ep->workitem); in bulk_out_completer()
2117 flush_workqueue(xdev->workq); in xillyusb_discovery()
2166 xdev->workq = alloc_workqueue(xillyname, WQ_HIGHPRI | WQ_UNBOUND, 0); in xillyusb_probe()
2168 if (!xdev->workq) { in xillyusb_probe()

Pages: 1 2 3