/linux/drivers/crypto/ccp/

  ccp-crypto-main.c
      64  static struct ccp_crypto_queue req_queue;  (variable)
     106  list_for_each_entry_continue(tmp, &req_queue.cmds, entry) {  in ccp_crypto_cmd_complete()
     117  if (req_queue.backlog != &req_queue.cmds) {  in ccp_crypto_cmd_complete()
     119  if (req_queue.backlog == &crypto_cmd->entry)  in ccp_crypto_cmd_complete()
     120  req_queue.backlog = crypto_cmd->entry.next;  in ccp_crypto_cmd_complete()
     122  *backlog = container_of(req_queue.backlog,  in ccp_crypto_cmd_complete()
     124  req_queue.backlog = req_queue.backlog->next;  in ccp_crypto_cmd_complete()
     127  if (req_queue  in ccp_crypto_cmd_complete()
     [all ...]
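The ccp hits sketch a single global command list (req_queue.cmds) with a backlog pointer marking the first backlogged command; the completion path advances that pointer and hands the next backlogged command back to the caller. A minimal sketch of that "list plus backlog cursor" idea, assuming simplified demo_* names rather than the driver's real types:

#include <linux/list.h>
#include <linux/container_of.h>

/*
 * Sketch, not the driver's code: a command list with a "backlog" cursor
 * pointing at the first backlogged entry, or at the list head when
 * nothing is backlogged.
 */
struct demo_queue {
	struct list_head cmds;		/* all pending commands */
	struct list_head *backlog;	/* first backlogged entry, or &cmds */
};

struct demo_cmd {
	struct list_head entry;
};

/*
 * On completion of @done, return the next backlogged command (if any)
 * and advance the cursor past it.
 */
static struct demo_cmd *demo_next_backlog(struct demo_queue *q,
					  struct demo_cmd *done)
{
	struct demo_cmd *backlog = NULL;

	if (q->backlog == &q->cmds)
		return NULL;			/* no backlog at all */

	/* Don't let the cursor keep pointing at the finished command. */
	if (q->backlog == &done->entry)
		q->backlog = done->entry.next;

	if (q->backlog != &q->cmds) {
		backlog = container_of(q->backlog, struct demo_cmd, entry);
		q->backlog = q->backlog->next;
	}
	return backlog;
}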
/linux/drivers/s390/char/

  tape_core.c
     142  if (list_empty(&tdev->req_queue))  in tape_operation_show()
     147  req = list_entry(tdev->req_queue.next, struct tape_request,  in tape_operation_show()
     494  INIT_LIST_HEAD(&device->req_queue);  in tape_alloc_device()
     596  list_for_each_safe(l, n, &device->req_queue) {  in __tape_discard_requests()
     767  list_for_each_safe(l, n, &device->req_queue) {  in __tape_start_next_request()
     829  request = list_entry(device->req_queue.next, struct tape_request, list);  in tape_long_busy_timeout()
     857  if (!list_empty(&device->req_queue))  in __tape_end_request()
     916  if (list_empty(&device->req_queue)) {  in __tape_start_request()
     923  list_add(&request->list, &device->req_queue);  in __tape_start_request()
     927  list_add_tail(&request->list, &device->req_queue);  in __tape_start_request()
     [all ...]

  tape_proc.c
      71  if (!list_empty(&device->req_queue)) {  in tape_proc_show()
      72  request = list_entry(device->req_queue.next,  in tape_proc_show()

  raw3270.c
      50  struct list_head req_queue; /* Request queue. */  (member)
     255  if (list_empty(&rp->req_queue) &&  in __raw3270_start()
     265  list_add_tail(&rq->list, &rp->req_queue);  in __raw3270_start()
     334  list_add_tail(&rq->list, &rp->req_queue);  in raw3270_start_irq()
     391  while (!list_empty(&rp->req_queue)) {  in raw3270_irq()
     392  rq = list_entry(rp->req_queue.next, struct raw3270_request, list);  in raw3270_irq()
     687  while (!list_empty(&rp->req_queue)) {  in __raw3270_disconnect()
     688  rq = list_entry(rp->req_queue.next, struct raw3270_request, list);  in __raw3270_disconnect()
     751  INIT_LIST_HEAD(&rp->req_queue);  in raw3270_setup_device()

  tape.h
     209  struct list_head req_queue;  (member)
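Both s390 drivers keep the per-device request queue as a plain struct list_head: INIT_LIST_HEAD() at device setup, list_add_tail() to append new work, and list_entry() on ->next to pick the request at the head of the queue. A hedged sketch of that pattern under the device lock, with demo_* placeholder names:

#include <linux/list.h>
#include <linux/spinlock.h>

struct demo_request {
	struct list_head list;
};

struct demo_device {
	spinlock_t lock;
	struct list_head req_queue;
};

/* cf. tape_alloc_device() and raw3270_setup_device() */
static void demo_device_init(struct demo_device *dev)
{
	spin_lock_init(&dev->lock);
	INIT_LIST_HEAD(&dev->req_queue);
}

static void demo_queue_request(struct demo_device *dev,
			       struct demo_request *req)
{
	unsigned long flags;

	spin_lock_irqsave(&dev->lock, flags);
	/* New work goes to the tail; the head stays the in-flight request. */
	list_add_tail(&req->list, &dev->req_queue);
	spin_unlock_irqrestore(&dev->lock, flags);
}

/* Caller holds dev->lock. */
static struct demo_request *demo_next_request(struct demo_device *dev)
{
	if (list_empty(&dev->req_queue))
		return NULL;
	return list_entry(dev->req_queue.next, struct demo_request, list);
}

Keeping the in-flight request at the head means "start the next request" is just a list_empty() check plus list_entry() on ->next, which is exactly what the tape and raw3270 IRQ paths above do.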
/linux/drivers/usb/usbip/

  vudc_dev.c
      77  while (!list_empty(&ep->req_queue)) {  in nuke()
      78  req = list_first_entry(&ep->req_queue, struct vrequest,  in nuke()
     327  list_add_tail(&req->req_entry, &ep->req_queue);  in vep_queue()
     353  list_for_each_entry(lst, &ep->req_queue, req_entry) {  in vep_dequeue()
     389  !list_empty(&ep->req_queue))  in vep_set_halt_and_wedge()
     543  INIT_LIST_HEAD(&ep->req_queue);  in init_vudc_hw()

  vudc.h
      32  struct list_head req_queue; /* Request queue */  (member)

  vudc_sysfs.c
      47  usb_req = list_last_entry(&ep0->req_queue, struct vrequest, req_entry);  in get_gadget_descs()

  vudc_transfer.c
     190  list_for_each_entry(req, &ep->req_queue, req_entry) {  in transfer()
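The vudc hits show the same list_head idiom per endpoint, plus the usual gadget-driver drain loop: nuke() pops requests with list_first_entry() until the queue is empty and completes each one. A sketch of that drain step, assuming demo_* placeholder types and -ESHUTDOWN as the completion status:

#include <linux/list.h>
#include <linux/errno.h>

struct demo_request {
	struct list_head entry;
	/* placeholder completion callback, assumed to be set by the caller */
	void (*complete)(struct demo_request *req, int status);
};

struct demo_ep {
	struct list_head req_queue;
};

/* Complete every request still sitting on the endpoint's queue. */
static void demo_nuke(struct demo_ep *ep)
{
	struct demo_request *req;

	while (!list_empty(&ep->req_queue)) {
		req = list_first_entry(&ep->req_queue, struct demo_request,
				       entry);
		list_del(&req->entry);		/* unlink before completing */
		req->complete(req, -ESHUTDOWN);
	}
}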
/linux/drivers/crypto/ccree/

  cc_request_mgr.c
      20  struct cc_crypto_req req_queue[MAX_REQUEST_QUEUE_SIZE];  (member)
     295  req_mgr_h->req_queue[req_mgr_h->req_queue_head] = *cc_req;  in cc_do_send_request()
     580  cc_req = &request_mgr_handle->req_queue[*tail];  in proc_completions()
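ccree is the odd one out: req_queue here is a fixed-size array of struct cc_crypto_req used as a ring, written at a head index on submission and read back at a tail index when completions are processed. A hedged sketch of the head/tail ring (full/empty accounting omitted), with DEMO_QUEUE_SIZE and demo_* names standing in for the driver's real identifiers:

#define DEMO_QUEUE_SIZE 32	/* must be a power of two for the mask below */

struct demo_req {
	int opcode;		/* placeholder payload */
};

struct demo_req_mgr {
	struct demo_req req_queue[DEMO_QUEUE_SIZE];
	unsigned int head;	/* next free slot */
	unsigned int tail;	/* oldest in-flight request */
};

static void demo_enqueue(struct demo_req_mgr *mgr, const struct demo_req *req)
{
	/* Copy by value into the ring, as cc_do_send_request() does. */
	mgr->req_queue[mgr->head] = *req;
	mgr->head = (mgr->head + 1) & (DEMO_QUEUE_SIZE - 1);
}

static struct demo_req *demo_complete_next(struct demo_req_mgr *mgr)
{
	struct demo_req *req = &mgr->req_queue[mgr->tail];

	mgr->tail = (mgr->tail + 1) & (DEMO_QUEUE_SIZE - 1);
	return req;
}

The real driver additionally tracks queue occupancy so the head never overruns the tail; that bookkeeping is left out of this sketch.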
/linux/include/media/

  media-device.h
      54  * @req_queue: Queue a validated request, cannot fail. If something goes
      64  * or delete objects from the request before req_queue exits.
      72  void (*req_queue)(struct media_request *req);  (member)

  v4l2-device.h
     253  v4l2_dev->mdev->ops->req_queue;  in v4l2_device_supports_requests()
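In the media core, req_queue is not a list at all but a media_device_ops callback: the kerneldoc in media-device.h says queuing a validated request cannot fail, and v4l2_device_supports_requests() treats the mere presence of the op as "this device supports requests". A hedged reconstruction of that check (the real helper is the static inline in v4l2-device.h); the demo_ name is a placeholder:

#include <media/v4l2-device.h>

/*
 * Request support boils down to "a media device is registered and it
 * provides a req_queue op".
 */
static bool demo_supports_requests(struct v4l2_device *v4l2_dev)
{
	return v4l2_dev->mdev && v4l2_dev->mdev->ops &&
	       v4l2_dev->mdev->ops->req_queue;
}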
/linux/drivers/media/mc/

  mc-request.c
     156  * and call req_queue. The reason we set the state first is that this  in media_request_ioctl_queue()
     157  * allows req_queue to unbind or complete the queued objects in case  in media_request_ioctl_queue()
     161  * after each object is queued through the req_queue op (and note that  in media_request_ioctl_queue()
     167  * while req_queue is called, so that's safe as well.  in media_request_ioctl_queue()
     175  mdev->ops->req_queue(req);  in media_request_ioctl_queue()
     252  !mdev->ops->req_validate || !mdev->ops->req_queue)  in media_request_get_by_fd()
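mc-request.c is where the op is actually invoked: media_request_ioctl_queue() moves the request into its queued state before calling mdev->ops->req_queue(), so the driver may unbind or complete objects from inside the op, and media_request_get_by_fd() refuses to look up requests on devices that do not provide both req_validate and req_queue. A sketch of that guard; the demo_ name and the -EBADR error code are assumptions here, not the core's exact return value:

#include <linux/errno.h>
#include <media/media-device.h>

/*
 * Only hand out request objects when the device really implements the
 * request API, i.e. both the validate and queue ops are present.
 */
static int demo_check_request_ops(struct media_device *mdev)
{
	if (!mdev || !mdev->ops ||
	    !mdev->ops->req_validate || !mdev->ops->req_queue)
		return -EBADR;	/* assumed error code */
	return 0;
}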
/linux/drivers/media/test-drivers/visl/

  visl-core.c
     431  .req_queue = v4l2_m2m_request_queue,
/linux/fs/fuse/

  dev_uring.c
     793  struct list_head *req_queue = &queue->fuse_req_queue;  in fuse_uring_ent_assign_req()  (local)
     798  req = list_first_entry_or_null(req_queue, struct fuse_req, list);  in fuse_uring_ent_assign_req()
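The fuse io_uring path uses the same intrusive-list queue but pops with list_first_entry_or_null(), which folds the empty check into the lookup. A small sketch of that idiom with placeholder demo_* names:

#include <linux/list.h>

struct demo_req {
	struct list_head list;
};

static struct demo_req *demo_pop(struct list_head *req_queue)
{
	struct demo_req *req;

	/* NULL on an empty list, so no separate list_empty() test. */
	req = list_first_entry_or_null(req_queue, struct demo_req, list);
	if (req)
		list_del_init(&req->list);	/* claim it */
	return req;
}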
/linux/drivers/media/platform/mediatek/vcodec/decoder/

  mtk_vcodec_dec_stateless.c
     734  .req_queue = v4l2_m2m_request_queue,
/linux/drivers/media/platform/nvidia/tegra-vde/

  v4l2.c
     918  .req_queue = v4l2_m2m_request_queue,
/linux/drivers/media/platform/rockchip/rkvdec/

  rkvdec.c
     745  .req_queue = v4l2_m2m_request_queue,
/linux/drivers/media/test-drivers/

  vim2m.c
    1506  .req_queue = v4l2_m2m_request_queue,
/linux/drivers/media/test-drivers/vivid/

  vivid-core.c
     885  .req_queue = vb2_request_queue,
/linux/drivers/media/test-drivers/vicodec/

  vicodec-core.c
    2033  .req_queue = v4l2_m2m_request_queue,
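All of the remaining hits are stateless codec / mem2mem drivers that simply point the media_device_ops callback at a generic helper: v4l2_m2m_request_queue() for the m2m devices, vb2_request_queue() in vivid. A hedged sketch of the usual wiring (the two helpers are real kernel functions; the structure name and the vb2_request_validate pairing reflect the common convention, not necessarily every driver listed above):

#include <media/media-device.h>
#include <media/v4l2-mem2mem.h>
#include <media/videobuf2-core.h>

/*
 * Typical mem2mem wiring: validation via the generic vb2 helper, queuing
 * handed straight to the v4l2-mem2mem core.
 */
static const struct media_device_ops demo_m2m_media_ops = {
	.req_validate = vb2_request_validate,
	.req_queue = v4l2_m2m_request_queue,
};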