Home
last modified time | relevance | path

Searched refs:nvme_sq (Results 1 – 8 of 8) sorted by relevance

/linux/drivers/nvme/target/
loop.c:55  struct nvmet_sq nvme_sq; member
96 container_of(req->sq, struct nvme_loop_queue, nvme_sq); in nvme_loop_queue_response()
153 if (!nvmet_req_init(&iod->req, &queue->nvme_sq, &nvme_loop_ops)) in nvme_loop_queue_rq()
185 if (!nvmet_req_init(&iod->req, &queue->nvme_sq, &nvme_loop_ops)) { in nvme_loop_submit_async_event()
275 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_destroy_admin_queue()
305 nvmet_sq_destroy(&ctrl->queues[i].nvme_sq); in nvme_loop_destroy_io_queues()
333 ret = nvmet_sq_init(&ctrl->queues[i].nvme_sq, in nvme_loop_init_io_queues()
370 error = nvmet_sq_init(&ctrl->queues[0].nvme_sq, in nvme_loop_configure_admin_queue()
413 nvmet_sq_destroy(&ctrl->queues[0].nvme_sq); in nvme_loop_configure_admin_queue()
tcp.c:153  struct nvmet_sq nvme_sq; member
402 if (queue->nvme_sq.ctrl) in nvmet_tcp_fatal_error()
403 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_tcp_fatal_error()
484 pdu->hdr.flags = NVME_TCP_F_DATA_LAST | (queue->nvme_sq.sqhd_disabled ? in nvmet_setup_c2h_data_pdu()
670 queue->data_digest || !queue->nvme_sq.sqhd_disabled) in nvmet_try_send_data()
693 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_data()
701 if (queue->nvme_sq.sqhd_disabled) in nvmet_try_send_data()
793 if (queue->nvme_sq.sqhd_disabled) { in nvmet_try_send_ddgst()
1070 if (unlikely(!nvmet_req_init(req, &queue->nvme_sq, &nvmet_tcp_ops))) { in nvmet_tcp_done_recv_pdu()
1604 nvmet_sq_put_tls_key(&queue->nvme_sq); in nvmet_tcp_release_queue_work()
[all …]
rdma.c:100  struct nvmet_sq nvme_sq; member
680 if (queue->nvme_sq.ctrl) { in nvmet_rdma_error_comp()
681 nvmet_ctrl_fatal_error(queue->nvme_sq.ctrl); in nvmet_rdma_error_comp()
950 queue->nvme_sq.ctrl->cntlid); in nvmet_rdma_execute_command()
978 if (!nvmet_req_init(&cmd->req, &queue->nvme_sq, &nvmet_rdma_ops)) in nvmet_rdma_handle_command()
1353 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_free_queue()
1439 ret = nvmet_sq_init(&queue->nvme_sq, &queue->nvme_cq); in nvmet_rdma_alloc_queue()
1518 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_rdma_alloc_queue()
1596 if (q->nvme_sq.ctrl == queue->nvme_sq.ctrl && in nvmet_rdma_queue_connect()
1825 if (queue->nvme_sq.ctrl != ctrl) in nvmet_rdma_delete_ctrl()
[all …]
fc.c:143  struct nvmet_sq nvme_sq; member
817 ret = nvmet_sq_init(&queue->nvme_sq, &queue->nvme_cq); in nvmet_fc_alloc_target_queue()
935 nvmet_sq_destroy(&queue->nvme_sq); in nvmet_fc_delete_target_queue()
1569 if (queue && queue->nvme_sq.ctrl == ctrl) { in nvmet_fc_delete_ctrl()
2566 ret = nvmet_req_init(&fod->req, &fod->queue->nvme_sq, in nvmet_fc_handle_fcp_rqst()
2965 container_of(sq, struct nvmet_fc_tgt_queue, nvme_sq); in nvmet_fc_host_traddr()
pci-epf.c:81  struct nvmet_sq nvme_sq; member
1394 status = nvmet_sq_create(tctrl, &sq->nvme_sq, &cq->nvme_cq, sqid, in nvmet_pci_epf_create_sq()
1415 nvmet_sq_destroy(&sq->nvme_sq); in nvmet_pci_epf_create_sq()
1432 if (sq->nvme_sq.ctrl) in nvmet_pci_epf_delete_sq()
1433 nvmet_sq_destroy(&sq->nvme_sq); in nvmet_pci_epf_delete_sq()
1614 if (!nvmet_req_init(req, &iod->sq->nvme_sq, &nvmet_pci_epf_fabrics_ops)) in nvmet_pci_epf_exec_iod_work()
nvmet.h:425  u16 (*install_queue)(struct nvmet_sq *nvme_sq);
/linux/drivers/nvme/host/
trace.h:141  TRACE_EVENT(nvme_sq,
trace.c:500  EXPORT_TRACEPOINT_SYMBOL_GPL(nvme_sq);