
Searched refs:nvme_req (Results 1 – 13 of 13) sorted by relevance
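For context on the hits below: nvme_req() is the small inline helper from drivers/nvme/host/nvme.h that returns the NVMe-private per-command data stored in the blk-mq request PDU. An abridged sketch, showing only the struct nvme_request fields that actually appear in these results:

struct nvme_request {
        struct nvme_command     *cmd;           /* SQE backing this request */
        union nvme_result       result;         /* CQE result (dwords 0/1) */
        u8                      retries;        /* bumped in nvme_retry_req() */
        u8                      flags;          /* NVME_REQ_CANCELLED, NVME_REQ_USERCMD, ... */
        u16                     status;         /* NVMe status code from the CQE */
        struct nvme_ctrl        *ctrl;          /* owning controller */
        /* ... further fields omitted ... */
};

static inline struct nvme_request *nvme_req(struct request *req)
{
        /* the nvme_request lives at the start of the blk-mq per-request PDU */
        return blk_mq_rq_to_pdu(req);
}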

/linux/drivers/nvme/host/
trace.h
63 __entry->ctrl_id = nvme_req(req)->ctrl->instance;
99 __entry->ctrl_id = nvme_req(req)->ctrl->instance;
101 __entry->cid = nvme_req(req)->cmd->common.command_id;
102 __entry->result = le64_to_cpu(nvme_req(req)->result.u64);
103 __entry->retries = nvme_req(req)->retries;
104 __entry->flags = nvme_req(req)->flags;
105 __entry->status = nvme_req(req)->status;
152 __entry->ctrl_id = nvme_req(req)->ctrl->instance;
fault_inject.c
72 fault_inject = &nvme_req(req)->ctrl->fault_inject; in nvme_should_fail()
80 nvme_req(req)->status = status; in nvme_should_fail()
ioctl.c
111 nvme_req(req)->flags |= NVME_REQ_USERCMD; in nvme_alloc_user_request()
123 struct nvme_ctrl *ctrl = nvme_req(req)->ctrl; in nvme_map_user_request()
192 ctrl = nvme_req(req)->ctrl; in nvme_submit_user_cmd()
197 *result = le64_to_cpu(nvme_req(req)->result.u64); in nvme_submit_user_cmd()
422 if (nvme_req(req)->flags & NVME_REQ_CANCELLED) { in nvme_uring_cmd_end_io()
425 pdu->status = nvme_req(req)->status; in nvme_uring_cmd_end_io()
429 pdu->result = le64_to_cpu(nvme_req(req)->result.u64); in nvme_uring_cmd_end_io()
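The ioctl.c hits at 422-429 are the io_uring passthrough completion side: the cancel flag, the NVMe status, and the 64-bit result are all read back out of nvme_req() once the request finishes. A hedged sketch of that shape; my_cmd_end_io and struct my_cmd_pdu are illustrative names, not the kernel's:

struct my_cmd_pdu {
        int     status;         /* status handed back to userspace */
        u64     result;         /* CQE result for the command */
};

static void my_cmd_end_io(struct request *req, struct my_cmd_pdu *pdu)
{
        if (nvme_req(req)->flags & NVME_REQ_CANCELLED)
                pdu->status = -EINTR;                   /* aborted, e.g. on timeout */
        else
                pdu->status = nvme_req(req)->status;    /* NVMe status code */

        /* the controller's result travels in the nvme_request, little-endian */
        pdu->result = le64_to_cpu(nvme_req(req)->result.u64);
}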
core.c
328 crd = (nvme_req(req)->status & NVME_STATUS_CRD) >> 11; in nvme_retry_req()
330 delay = nvme_req(req)->ctrl->crdt[crd - 1] * 100; in nvme_retry_req()
332 nvme_req(req)->retries++; in nvme_retry_req()
340 struct nvme_request *nr = nvme_req(req); in nvme_log_error()
371 struct nvme_request *nr = nvme_req(req); in nvme_log_err_passthru()
401 if (likely(nvme_req(req)->status == 0)) in nvme_decide_disposition()
405 (nvme_req(req)->status & NVME_STATUS_DNR) || in nvme_decide_disposition()
406 nvme_req(req)->retries >= nvme_max_retries) in nvme_decide_disposition()
409 if ((nvme_req(req)->status & NVME_SCT_SC_MASK) == NVME_SC_AUTH_REQUIRED) in nvme_decide_disposition()
413 if (nvme_is_path_error(nvme_req(req)->status) || in nvme_decide_disposition()
[all...]
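The core.c hits at 328-332 are the Command Retry Delay path: the 2-bit CRD field of the status selects one of the controller's advertised CRDT values, which are in units of 100 ms. A self-contained sketch of that computation; nvme_crd_delay_ms() is a made-up helper name, not a kernel function:

static unsigned long nvme_crd_delay_ms(u16 status, const u8 crdt[3])
{
        /* CRD is the 2-bit Command Retry Delay index at bits 12:11 of the
         * driver-side status word (cf. the NVME_STATUS_CRD >> 11 hit above). */
        u16 crd = (status >> 11) & 0x3;

        if (!crd)
                return 0;                       /* CRD == 0: no delay requested */
        return crdt[crd - 1] * 100UL;           /* CRDT values are in 100 ms units */
}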
fabrics.h
208 nvme_req(rq)->status = NVME_SC_HOST_ABORTED_CMD; in nvmf_complete_timed_out_request()
rdma.c
302 nvme_req(rq)->ctrl = &ctrl->ctrl; in nvme_rdma_init_request()
309 req->metadata_sgl = (void *)nvme_req(rq) + in nvme_rdma_init_request()
314 nvme_req(rq)->cmd = req->sqe.data; in nvme_rdma_init_request()
2000 struct nvme_command *c = nvme_req(rq)->cmd; in nvme_rdma_queue_rq()
2089 nvme_req(rq)->status = NVME_SC_INVALID_PI; in nvme_rdma_check_pi_status()
2096 nvme_req(rq)->status = NVME_SC_GUARD_CHECK; in nvme_rdma_check_pi_status()
2099 nvme_req(rq)->status = NVME_SC_REFTAG_CHECK; in nvme_rdma_check_pi_status()
2102 nvme_req(rq)->status = NVME_SC_APPTAG_CHECK; in nvme_rdma_check_pi_status()
apple.c
603 nvme_req(req)->status != NVME_SC_SUCCESS, in apple_nvme_handle_cqe()
790 struct nvme_request *nreq = nvme_req(req); in apple_nvme_init_request()
906 nvme_req(req)->status = NVME_SC_HOST_ABORTED_CMD; in apple_nvme_timeout()
907 nvme_req(req)->flags |= NVME_REQ_CANCELLED; in apple_nvme_timeout()
932 nvme_req(req)->flags |= NVME_REQ_CANCELLED; in apple_nvme_timeout()
pci.c
509 nvme_req(req)->ctrl = set->driver_data; in nvme_pci_init_request()
510 nvme_req(req)->cmd = &iod->cmd; in nvme_pci_init_request()
605 nvme_req(req)->flags & NVME_REQ_USERCMD; in nvme_pci_metadata_use_sgls()
614 if (nvme_req(req)->flags & NVME_REQ_USERCMD) in nvme_pci_use_sgls()
1310 nvme_req(req)->status != NVME_SC_SUCCESS, in nvme_handle_cqe()
1529 "Abort status: 0x%x", nvme_req(req)->status); in abort_endio()
1654 nvme_req(req)->flags |= NVME_REQ_CANCELLED; in nvme_timeout()
1668 opcode = nvme_req(req)->cmd->common.opcode; in nvme_timeout()
1674 nvme_req(req)->flags |= NVME_REQ_CANCELLED; in nvme_timeout()
tcp.c
567 nvme_req(rq)->ctrl = &ctrl->ctrl; in nvme_tcp_init_request()
568 nvme_req(rq)->cmd = &pdu->cmd; in nvme_tcp_init_request()
fc.c
2121 nvme_req(rq)->ctrl = &ctrl->ctrl; in nvme_fc_init_request()
2122 nvme_req(rq)->cmd = &op->op.cmd_iu.sqe; in nvme_fc_init_request()
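The pci.c (509-510), rdma.c (302, 314), tcp.c (567-568) and fc.c (2121-2122) hits all come from each transport's blk-mq .init_request callback, which points nvme_req(rq)->ctrl at the controller and nvme_req(rq)->cmd at the memory that will hold the SQE; the loop.c hits further down follow the same pattern, with struct nvme_request embedded as a member (its line 19). A hedged sketch of that shared pattern; struct my_transport_request and my_init_request are illustrative names only:

struct my_transport_request {
        struct nvme_request     req;    /* must be first: nvme_req(rq) aliases the PDU */
        struct nvme_command     cmd;    /* SQE storage owned by the transport */
        /* ... transport-specific fields ... */
};

static int my_init_request(struct blk_mq_tag_set *set, struct request *rq,
                           unsigned int hctx_idx, unsigned int numa_node)
{
        struct my_transport_request *treq = blk_mq_rq_to_pdu(rq);

        nvme_req(rq)->ctrl = set->driver_data;  /* owning controller */
        nvme_req(rq)->cmd = &treq->cmd;         /* where the SQE will be built */
        return 0;
}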
/linux/drivers/nvme/target/
passthru.c
218 struct nvme_ctrl *ctrl = nvme_req(rq)->ctrl; in nvmet_passthru_execute_cmd_work()
241 req->cqe->result = nvme_req(rq)->result; in nvmet_passthru_execute_cmd_work()
254 req->cqe->result = nvme_req(rq)->result; in nvmet_passthru_req_done()
255 nvmet_req_complete(req, nvme_req(rq)->status); in nvmet_passthru_req_done()
loop.c
19 struct nvme_request nvme_req; member
66 static void nvme_loop_queue_response(struct nvmet_req *nvme_req);
210 nvme_req(req)->ctrl = &ctrl->ctrl; in nvme_loop_init_request()
211 nvme_req(req)->cmd = &iod->cmd; in nvme_loop_init_request()
fc.c
215 nvmet_req_to_fod(struct nvmet_req *nvme_req) in nvmet_req_to_fod() argument
217 return container_of(nvme_req, struct nvmet_fc_fcp_iod, req); in nvmet_req_to_fod()
1933 static void nvmet_fc_fcp_nvme_cmd_done(struct nvmet_req *nvme_req);
2515 nvmet_fc_fcp_nvme_cmd_done(struct nvmet_req *nvme_req) in nvmet_fc_fcp_nvme_cmd_done() argument
2517 struct nvmet_fc_fcp_iod *fod = nvmet_req_to_fod(nvme_req); in nvmet_fc_fcp_nvme_cmd_done()
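On the target side the name nvme_req is reused as a plain parameter: nvmet_req_to_fod() walks back from the embedded struct nvmet_req to the FC transport's per-I/O state with container_of(). A minimal sketch of the idiom; the struct bodies are placeholders, not the real layouts, and a simplified container_of() is spelled out only to keep the sketch self-contained (the kernel provides its own in <linux/container_of.h>):

#include <stddef.h>     /* offsetof() */

#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

struct nvmet_req {
        void    *placeholder;   /* stands in for the real nvmet_req fields */
};

struct nvmet_fc_fcp_iod {
        void    *fc_state;      /* stands in for the FC-specific per-I/O fields */
        struct nvmet_req req;   /* embedded generic target request */
};

static struct nvmet_fc_fcp_iod *nvmet_req_to_fod(struct nvmet_req *nvme_req)
{
        /* recover the containing iod from a pointer to its embedded member */
        return container_of(nvme_req, struct nvmet_fc_fcp_iod, req);
}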