Home
last modified time | relevance | path

Searched refs:shadow_wqe (Results 1 – 5 of 5) sorted by relevance

/linux/drivers/infiniband/hw/mana/
H A D  cq.c — line 193:  struct ud_sq_shadow_wqe *shadow_wqe; in handle_ud_sq_cqe() local
195 shadow_wqe = shadow_queue_get_next_to_complete(&qp->shadow_sq); in handle_ud_sq_cqe()
196 if (!shadow_wqe) in handle_ud_sq_cqe()
199 shadow_wqe->header.error_code = rdma_cqe->ud_send.vendor_error; in handle_ud_sq_cqe()
201 wq->tail += shadow_wqe->header.posted_wqe_size; in handle_ud_sq_cqe()
209 struct ud_rq_shadow_wqe *shadow_wqe; in handle_ud_rq_cqe() local
211 shadow_wqe = shadow_queue_get_next_to_complete(&qp->shadow_rq); in handle_ud_rq_cqe()
212 if (!shadow_wqe) in handle_ud_rq_cqe()
215 shadow_wqe->byte_len = rdma_cqe->ud_recv.msg_len; in handle_ud_rq_cqe()
216 shadow_wqe->src_qpn = rdma_cqe->ud_recv.src_qpn; in handle_ud_rq_cqe()
[all …]
H A D  wr.c — line 17:  struct ud_rq_shadow_wqe *shadow_wqe; in mana_ib_post_recv_ud() local
38 shadow_wqe = shadow_queue_producer_entry(&qp->shadow_rq); in mana_ib_post_recv_ud()
39 memset(shadow_wqe, 0, sizeof(*shadow_wqe)); in mana_ib_post_recv_ud()
40 shadow_wqe->header.opcode = IB_WC_RECV; in mana_ib_post_recv_ud()
41 shadow_wqe->header.wr_id = wr->wr_id; in mana_ib_post_recv_ud()
42 shadow_wqe->header.posted_wqe_size = wqe_info.wqe_size_in_bu; in mana_ib_post_recv_ud()
85 struct ud_sq_shadow_wqe *shadow_wqe; in mana_ib_post_send_ud() local
133 shadow_wqe = shadow_queue_producer_entry(&qp->shadow_sq); in mana_ib_post_send_ud()
134 memset(shadow_wqe, 0, sizeof(*shadow_wqe)); in mana_ib_post_send_ud()
135 shadow_wqe->header.opcode = IB_WC_SEND; in mana_ib_post_send_ud()
[all …]
/linux/drivers/net/ethernet/huawei/hinic/
H A D  hinic_hw_wq.c — line 78:  (((unsigned long)(wqe) - (unsigned long)(wq)->shadow_wqe) \
381 wq->shadow_wqe = devm_kcalloc(&pdev->dev, wq->num_q_pages, in alloc_wqes_shadow()
383 if (!wq->shadow_wqe) in alloc_wqes_shadow()
394 devm_kfree(&pdev->dev, wq->shadow_wqe); in alloc_wqes_shadow()
408 devm_kfree(&pdev->dev, wq->shadow_wqe); in free_wqes_shadow()
773 void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size]; in hinic_get_wqe()
846 void *shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size]; in hinic_read_wqe()
878 return WQE_IN_RANGE(wqe, wq->shadow_wqe, in wqe_shadow()
879 &wq->shadow_wqe[wqe_shadow_size]); in wqe_shadow()
900 shadow_addr = &wq->shadow_wqe[curr_pg * wq->max_wqe_size]; in hinic_write_wqe()
H A D  hinic_hw_wq.h — line 41:  u8 *shadow_wqe; member
/linux/drivers/net/ethernet/huawei/hinic3/
H A D  hinic3_cmdq.c — line 339:  const struct cmdq_wqe *shadow_wqe) in cmdq_wqe_fill() argument
341 const struct cmdq_header *src = (struct cmdq_header *)shadow_wqe; in cmdq_wqe_fill()