Lines Matching defs:wqe

Every hit below is a use of a local struct rvt_swqe *wqe in the hfi1 RC protocol engine (drivers/infiniband/hw/hfi1/rc.c); the number leading each hit is the line in that file.

394 struct rvt_swqe *wqe;
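
Nearly every hit below reaches a WQE through rvt_get_swqe_ptr(), which fetches slot s_cur, s_last or s_acked out of the send queue. A minimal user-space sketch of the indexing involved, using simplified, hypothetical stand-in types (the real ones are struct rvt_swqe and struct rvt_sge from include/rdma/rdmavt_qp.h): each slot is a fixed header followed by s_max_sge scatter/gather entries, so slot n sits at a computed byte offset, not at plain array index n.

#include <stdio.h>
#include <stdlib.h>

/* Hypothetical, simplified stand-ins for the rdmavt types. */
struct sge  { void *vaddr; unsigned length; };
struct swqe { int opcode; unsigned num_sge; struct sge sg_list[]; };

/* Sketch of rvt_get_swqe_ptr(): stride = header + s_max_sge SGEs. */
static struct swqe *get_swqe_ptr(char *s_wq, unsigned s_max_sge, unsigned n)
{
	size_t slot = sizeof(struct swqe) + s_max_sge * sizeof(struct sge);

	return (struct swqe *)(s_wq + slot * n);
}

int main(void)
{
	unsigned s_max_sge = 4, s_size = 8;
	size_t slot = sizeof(struct swqe) + s_max_sge * sizeof(struct sge);
	char *s_wq = calloc(s_size, slot);

	get_swqe_ptr(s_wq, s_max_sge, 3)->opcode = 42;
	printf("slot 3 opcode = %d\n",
	       get_swqe_ptr(s_wq, s_max_sge, 3)->opcode);
	free(s_wq);
	return 0;
}
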
449 wqe = rvt_get_swqe_ptr(qp, qp->s_last);
450 hfi1_trdma_send_complete(qp, wqe, qp->s_last != qp->s_acked ?
469 wqe = rvt_get_swqe_ptr(qp, qp->s_cur);
495 if ((wqe->wr.send_flags & IB_SEND_FENCE) &&
497 (wqe->wr.opcode != IB_WR_TID_RDMA_READ ||
506 if (wqe->wr.opcode == IB_WR_REG_MR ||
507 wqe->wr.opcode == IB_WR_LOCAL_INV) {
517 if (!(wqe->wr.send_flags &
521 wqe->wr.ex.invalidate_rkey);
524 rvt_send_complete(qp, wqe,
533 qp->s_psn = wqe->psn;
540 len = wqe->length;
549 hfi1_tid_rdma_wqe_interlock(qp, wqe))
552 switch (wqe->wr.opcode) {
557 if (!rvt_rc_credit_avail(qp, wqe))
564 if (wqe->wr.opcode == IB_WR_SEND) {
566 } else if (wqe->wr.opcode == IB_WR_SEND_WITH_IMM) {
569 ohdr->u.imm_data = wqe->wr.ex.imm_data;
575 wqe->wr.ex.invalidate_rkey);
578 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
591 if (!rvt_rc_credit_avail(qp, wqe))
595 wqe->rdma_wr.remote_addr,
598 cpu_to_be32(wqe->rdma_wr.rkey);
606 if (wqe->wr.opcode == IB_WR_RDMA_WRITE) {
612 ohdr->u.rc.imm_data = wqe->wr.ex.imm_data;
614 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
635 hwords += hfi1_build_tid_rdma_write_req(qp, wqe, ohdr,
702 req = wqe_to_tid_req(wqe);
710 req->comp_seg = delta_psn(bth2, wqe->psn);
717 delta_psn(wqe->lpsn, bth2) + 1;
722 wqe->wr.opcode,
723 wqe->psn, wqe->lpsn,
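
The delta_psn()/cmp_psn() calls in this region (lines 710, 717, 765 and many below) do ordering arithmetic on packet sequence numbers, which are 24 bits wide and wrap. A standalone sketch, assuming the helpers match the usual hfi1 definitions: shift the difference up so the 24-bit sign lands in bit 31; like the kernel, this leans on gcc's wrapping and arithmetic-shift behavior.

#include <stdio.h>

static int cmp_psn(unsigned a, unsigned b)
{
	return ((int)a - (int)b) << 8;		/* sign of 24-bit diff in bit 31 */
}

static unsigned delta_psn(unsigned a, unsigned b)
{
	return (((int)a - (int)b) << 8) >> 8;	/* sign-extended 24-bit delta */
}

int main(void)
{
	/* 0x000001 is two packets *after* 0xffffff despite being smaller. */
	printf("cmp_psn(0x000001, 0xffffff) > 0: %d\n",
	       cmp_psn(0x000001, 0xffffff) > 0);	/* 1 */
	printf("delta_psn(0x000001, 0xffffff) = %u\n",
	       delta_psn(0x000001, 0xffffff));		/* 2 */
	return 0;
}
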
743 wqe->rdma_wr.remote_addr,
746 cpu_to_be32(wqe->rdma_wr.rkey);
759 wpriv = wqe->priv;
760 req = wqe_to_tid_req(wqe);
762 wqe->wr.opcode,
763 wqe->psn, wqe->lpsn,
765 delta = cmp_psn(qp->s_psn, wqe->psn);
792 qp->s_sge.sge = wqe->sg_list[0];
793 qp->s_sge.sg_list = wqe->sg_list + 1;
794 qp->s_sge.num_sge = wqe->wr.num_sge;
795 qp->s_sge.total_len = wqe->length;
796 qp->s_len = wqe->length;
813 wqe->length - req->seg_len * req->cur_seg);
814 delta = hfi1_build_tid_rdma_read_req(qp, wqe, ohdr,
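
Lines 792-796 (and again 876-880) prime the engine's gather state from the WQE: the first SGE is copied by value so it can be consumed in place, the remainder stay referenced. A sketch with hypothetical, simplified types:

#include <stdio.h>

struct sge       { const char *vaddr; unsigned length; };
struct sge_state {
	struct sge  sge;	/* element currently being consumed (by value) */
	struct sge *sg_list;	/* remaining elements */
	unsigned    num_sge;
	unsigned    total_len;
};

/* Mirrors the qp->s_sge setup at lines 792-796 / 876-880. */
static void prime_sge_state(struct sge_state *ss, struct sge *sg_list,
			    unsigned num_sge, unsigned total_len)
{
	ss->sge = sg_list[0];
	ss->sg_list = sg_list + 1;
	ss->num_sge = num_sge;
	ss->total_len = total_len;
}

int main(void)
{
	struct sge wqe_sges[2] = { { "hdr", 64 }, { "payload", 4096 } };
	struct sge_state ss;

	prime_sge_state(&ss, wqe_sges, 2, 64 + 4096);
	printf("first chunk: %s (%u bytes), %u bytes total\n",
	       ss.sge.vaddr, ss.sge.length, ss.total_len);
	return 0;
}
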
847 if (wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
848 wqe->wr.opcode == IB_WR_OPFN) {
850 put_ib_ateth_swap(wqe->atomic_wr.swap,
852 put_ib_ateth_compare(wqe->atomic_wr.compare_add,
856 put_ib_ateth_swap(wqe->atomic_wr.compare_add,
860 put_ib_ateth_vaddr(wqe->atomic_wr.remote_addr,
863 wqe->atomic_wr.rkey);
875 if (wqe->wr.opcode != IB_WR_TID_RDMA_READ) {
876 qp->s_sge.sge = wqe->sg_list[0];
877 qp->s_sge.sg_list = wqe->sg_list + 1;
878 qp->s_sge.num_sge = wqe->wr.num_sge;
879 qp->s_sge.total_len = wqe->length;
880 qp->s_len = wqe->length;
887 if (wqe->wr.opcode == IB_WR_RDMA_READ ||
888 wqe->wr.opcode == IB_WR_TID_RDMA_WRITE)
889 qp->s_psn = wqe->lpsn + 1;
890 else if (wqe->wr.opcode == IB_WR_TID_RDMA_READ)
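
Lines 847-863 pack the atomic header on the requester side; note line 856, where FETCH_AND_ADD carries its compare_add operand in the swap slot. The semantics the header requests of the responder (per the InfiniBand spec) are sketched below: both operations return the prior 64-bit value, which line 2033 later stores through wqe->sg_list[0].vaddr on the requester.

#include <stdio.h>
#include <stdint.h>

static uint64_t atomic_cmp_swp(uint64_t *vaddr, uint64_t compare, uint64_t swap)
{
	uint64_t old = *vaddr;

	if (old == compare)	/* write only on match */
		*vaddr = swap;
	return old;
}

static uint64_t atomic_fetch_add(uint64_t *vaddr, uint64_t add)
{
	uint64_t old = *vaddr;

	*vaddr += add;		/* always adds */
	return old;
}

int main(void)
{
	uint64_t mem = 5;

	printf("fetch_add returned %llu, mem now %llu\n",
	       (unsigned long long)atomic_fetch_add(&mem, 3),
	       (unsigned long long)mem);			/* 5, 8 */
	printf("cmp_swp(8 -> 42) returned %llu, mem now %llu\n",
	       (unsigned long long)atomic_cmp_swp(&mem, 8, 42),
	       (unsigned long long)mem);			/* 8, 42 */
	return 0;
}
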
906 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu);
920 if (wqe->wr.opcode == IB_WR_SEND) {
922 } else if (wqe->wr.opcode == IB_WR_SEND_WITH_IMM) {
925 ohdr->u.imm_data = wqe->wr.ex.imm_data;
930 ohdr->u.ieth = cpu_to_be32(wqe->wr.ex.invalidate_rkey);
933 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
951 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu);
965 if (wqe->wr.opcode == IB_WR_RDMA_WRITE) {
970 ohdr->u.imm_data = wqe->wr.ex.imm_data;
972 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
991 len = (delta_psn(qp->s_psn, wqe->psn)) * pmtu;
993 wqe->rdma_wr.remote_addr + len,
996 cpu_to_be32(wqe->rdma_wr.rkey);
997 ohdr->u.rc.reth.length = cpu_to_be32(wqe->length - len);
1001 qp->s_psn = wqe->lpsn + 1;
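
Lines 991-1001 rebuild the RETH for an RDMA WRITE restarting mid-transfer: since every data packet carries exactly one pmtu of payload, the resend offset is pure PSN distance. Worked through with illustrative numbers (delta_psn() reduced to a plain difference):

#include <stdio.h>

int main(void)
{
	unsigned wqe_psn = 100, s_psn = 103;	/* restart 3 packets in */
	unsigned pmtu = 4096, wqe_length = 32768;
	unsigned long long remote_addr = 0x10000000ULL;

	unsigned len = (s_psn - wqe_psn) * pmtu;	/* line 991 */

	/* lines 993-997: advance the address, shrink the length */
	printf("RETH: addr=0x%llx len=%u\n",
	       remote_addr + len, wqe_length - len);
	return 0;
}
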
1015 req = wqe_to_tid_req(wqe);
1019 req->comp_seg = delta_psn(qp->s_psn, wqe->psn);
1020 len = wqe->length - (req->comp_seg * remote->max_len);
1024 hwords += hfi1_build_tid_rdma_write_req(qp, wqe, ohdr, &bth1,
1026 qp->s_psn = wqe->lpsn + 1;
1029 priv->pending_tid_w_resp += delta_psn(wqe->lpsn, bth2) + 1;
1033 trace_hfi1_tid_req_make_req_write(qp, 0, wqe->wr.opcode,
1034 wqe->psn, wqe->lpsn, req);
1038 if (wqe->wr.opcode != IB_WR_TID_RDMA_READ)
1041 req = wqe_to_tid_req(wqe);
1042 wpriv = wqe->priv;
1048 req->cur_seg = delta_psn(qp->s_psn, wqe->psn) / priv->pkts_ps;
1057 hfi1_tid_rdma_restart_req(qp, wqe, &bth2);
1066 hfi1_trdma_send_complete(qp, wqe, IB_WC_LOC_QP_OP_ERR);
1071 wqe->length - req->seg_len * req->cur_seg);
1075 delta = hfi1_build_tid_rdma_read_packet(wqe, ohdr, &bth1,
1088 trace_hfi1_tid_req_make_req_read(qp, 0, wqe->wr.opcode,
1089 wqe->psn, wqe->lpsn, req);
1092 req = wqe_to_tid_req(wqe);
1093 delta = cmp_psn(qp->s_psn, wqe->psn);
1099 if (wqe->wr.opcode != IB_WR_TID_RDMA_READ || delta == 0 ||
1114 wpriv = wqe->priv;
1117 wqe->length - req->seg_len * req->cur_seg);
1118 delta = hfi1_build_tid_rdma_read_req(qp, wqe, ohdr, &bth1,
1131 trace_hfi1_tid_req_make_req_read(qp, 0, wqe->wr.opcode,
1132 wqe->psn, wqe->lpsn, req);
1136 delta = delta_psn(bth2, wqe->psn);
1138 wqe->wr.opcode != IB_WR_TID_RDMA_WRITE)
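
The TID RDMA hits (lines 813, 1019-1020, 1048, 1071, 1117) all lean on the same segmentation arithmetic: a request is cut into seg_len-byte segments covering pkts_ps packets each, so both the current segment and the bytes still to send fall out of PSN distance. A sketch with illustrative values; reading pkts_ps as "packets per segment" is an assumption here.

#include <stdio.h>

int main(void)
{
	unsigned wqe_psn = 0, s_psn = 24;	/* 24 packets already covered */
	unsigned pkts_ps = 8;			/* packets per segment */
	unsigned seg_len = 8 * 4096;		/* bytes per segment */
	unsigned wqe_length = 10 * seg_len;

	unsigned cur_seg = (s_psn - wqe_psn) / pkts_ps;		/* line 1048 */
	unsigned remaining = wqe_length - seg_len * cur_seg;	/* line 1117 */

	printf("cur_seg=%u, %u bytes left\n", cur_seg, remaining);
	return 0;
}
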
1411 * @wqe: the wqe
1414 * for the current wqe.
1418 struct rvt_swqe *wqe)
1420 u32 opcode = wqe->wr.opcode;
1427 struct tid_rdma_request *req = wqe_to_tid_req(wqe);
1430 if (cmp_psn(psn, wqe->lpsn) <= 0) {
1433 cur_seg = (psn - wqe->psn) / priv->pkts_ps;
1438 wqe->wr.opcode,
1439 wqe->psn,
1440 wqe->lpsn,
1461 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n);
1475 if (cmp_psn(psn, wqe->psn) <= 0) {
1479 update_num_rd_atomic(qp, psn, wqe);
1489 wqe = rvt_get_swqe_ptr(qp, n);
1490 diff = cmp_psn(psn, wqe->psn);
1492 /* Point wqe back to the previous one */
1493 wqe = rvt_get_swqe_ptr(qp, qp->s_cur);
1506 update_num_rd_atomic(qp, psn, wqe);
1508 opcode = wqe->wr.opcode;
1567 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1582 if (wqe->wr.opcode == IB_WR_OPFN) {
1591 wqe = do_rc_completion(qp, wqe, ibp);
1595 if (wqe->wr.opcode == IB_WR_TID_RDMA_READ) {
1598 req = wqe_to_tid_req(wqe);
1603 hfi1_trdma_send_complete(qp, wqe,
1616 if (wqe->wr.opcode == IB_WR_RDMA_READ ||
1617 wqe->wr.opcode == IB_WR_TID_RDMA_READ)
1637 struct rvt_swqe *wqe;
1643 wqe = rvt_get_swqe_ptr(qp, n);
1644 if (cmp_psn(psn, wqe->lpsn) <= 0) {
1645 if (wqe->wr.opcode == IB_WR_RDMA_READ ||
1646 wqe->wr.opcode == IB_WR_TID_RDMA_READ ||
1647 wqe->wr.opcode == IB_WR_TID_RDMA_WRITE)
1648 qp->s_sending_psn = wqe->lpsn + 1;
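
The scan at lines 1643-1648 walks the queue for the WQE containing psn; for operations that are not acknowledged packet-by-packet (RDMA READ and the TID variants), the "sent up to" marker jumps past the whole WQE at once. A simplified model with linear PSNs and a hypothetical flat queue:

#include <stdio.h>

struct wqe { unsigned psn, lpsn; int is_read_or_tid; };

static unsigned advance_sending_psn(unsigned psn, unsigned s_sending_psn,
				    const struct wqe *q, int n)
{
	for (int i = 0; i < n; i++) {
		if (psn <= q[i].lpsn) {		/* found the covering WQE */
			if (q[i].is_read_or_tid)
				s_sending_psn = q[i].lpsn + 1;
			break;
		}
	}
	return s_sending_psn;
}

int main(void)
{
	struct wqe q[] = { { 0, 7, 0 }, { 8, 23, 1 } };

	printf("s_sending_psn -> %u\n",
	       advance_sending_psn(10, 10, q, 2));	/* 24 */
	return 0;
}
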
1697 struct rvt_swqe *wqe;
1740 wqe = rvt_get_swqe_ptr(qp, tail);
1741 req = wqe_to_tid_req(wqe);
1781 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1782 req = wqe_to_tid_req(wqe);
1783 if (wqe->wr.opcode == IB_WR_TID_RDMA_WRITE &&
1789 wqe = rvt_get_swqe_ptr(qp, qp->s_last);
1790 if (cmp_psn(wqe->lpsn, qp->s_sending_psn) >= 0 &&
1793 trdma_clean_swqe(qp, wqe);
1794 trace_hfi1_qp_send_completion(qp, wqe, qp->s_last);
1796 wqe,
1797 ib_hfi1_wc_opcode[wqe->wr.opcode],
1825 struct rvt_swqe *wqe,
1836 trace_hfi1_rc_completion(qp, wqe->lpsn);
1837 if (cmp_psn(wqe->lpsn, qp->s_sending_psn) < 0 ||
1839 trdma_clean_swqe(qp, wqe);
1840 trace_hfi1_qp_send_completion(qp, wqe, qp->s_last);
1842 wqe,
1843 ib_hfi1_wc_opcode[wqe->wr.opcode],
1874 if (wqe->wr.opcode != IB_WR_TID_RDMA_WRITE)
1875 update_last_psn(qp, wqe->lpsn);
1886 wqe = rvt_get_swqe_ptr(qp, qp->s_cur);
1889 qp->s_psn = wqe->psn;
1896 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1902 return wqe;
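
The tests at lines 1790 and 1837 implement the same gate: a WQE may be ACKed by the peer before its last packet has actually left the local send engine, so the user-visible completion waits until s_sending_psn has passed wqe->lpsn. Sketch with linear PSNs; a boolean stands in for the real second clause, which compares s_sending_psn against s_sending_hpsn.

#include <stdio.h>

struct wqe { unsigned psn, lpsn; };

static int ready_to_complete(const struct wqe *w, unsigned s_sending_psn,
			     int engine_idle)
{
	return w->lpsn < s_sending_psn || engine_idle;
}

int main(void)
{
	struct wqe w = { .psn = 8, .lpsn = 15 };

	printf("sending_psn=12: %d\n", ready_to_complete(&w, 12, 0)); /* 0 */
	printf("sending_psn=16: %d\n", ready_to_complete(&w, 16, 0)); /* 1 */
	return 0;
}
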
1970 struct rvt_swqe *wqe;
1986 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1993 while ((diff = delta_psn(ack_psn, wqe->lpsn)) >= 0) {
2000 if (wqe->wr.opcode == IB_WR_RDMA_READ &&
2015 if ((wqe->wr.opcode == IB_WR_RDMA_READ &&
2017 (wqe->wr.opcode == IB_WR_TID_RDMA_READ &&
2019 ((wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
2020 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) &&
2022 (wqe->wr.opcode == IB_WR_TID_RDMA_WRITE &&
2031 if (wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
2032 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) {
2033 u64 *vaddr = wqe->sg_list[0].vaddr;
2036 if (wqe->wr.opcode == IB_WR_OPFN)
2040 (wqe->wr.opcode == IB_WR_RDMA_READ ||
2041 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
2042 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD)) {
2061 if (wqe->wr.opcode == IB_WR_TID_RDMA_WRITE)
2064 wqe = do_rc_completion(qp, wqe, ibp);
2069 trace_hfi1_rc_ack_do(qp, aeth, psn, wqe);
2074 if (wqe->wr.opcode == IB_WR_TID_RDMA_READ) {
2075 if (wqe_to_tid_req(wqe)->ack_pending)
2155 if (wqe->wr.opcode == IB_WR_TID_RDMA_WRITE &&
2157 cmp_psn(psn, wqe->psn) >= 0)
2169 if (!(rdi->post_parms[wqe->wr.opcode].flags &
2185 if (wqe->wr.opcode == IB_WR_TID_RDMA_WRITE) {
2232 if (wqe->wr.opcode == IB_WR_TID_RDMA_READ)
2235 hfi1_trdma_send_complete(qp, wqe, status);
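
Line 1993 is the heart of do_rc_ack(): one ACK retires every WQE whose last PSN it covers, starting at s_acked, while lines 2000-2022 list the carve-outs (reads and atomics still need their response data, TID writes their resync) that break out of the loop early. A skeleton of the retire loop with linear PSNs and a hypothetical fixed array:

#include <stdio.h>

struct wqe { unsigned psn, lpsn; };

int main(void)
{
	struct wqe q[] = { { 0, 3 }, { 4, 4 }, { 5, 9 } };
	unsigned s_acked = 0, s_size = 3, ack_psn = 4;

	/* while ((diff = delta_psn(ack_psn, wqe->lpsn)) >= 0) ... */
	while (s_acked < s_size && (int)(ack_psn - q[s_acked].lpsn) >= 0) {
		printf("completing wqe [%u..%u]\n",
		       q[s_acked].psn, q[s_acked].lpsn);
		s_acked++;
	}
	printf("s_acked now %u\n", s_acked);	/* 2 */
	return 0;
}
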
2266 struct rvt_swqe *wqe;
2272 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
2274 while (cmp_psn(psn, wqe->lpsn) > 0) {
2275 if (wqe->wr.opcode == IB_WR_RDMA_READ ||
2276 wqe->wr.opcode == IB_WR_TID_RDMA_READ ||
2277 wqe->wr.opcode == IB_WR_TID_RDMA_WRITE ||
2278 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
2279 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD)
2281 wqe = do_rc_completion(qp, wqe, ibp);
2310 struct rvt_swqe *wqe;
2354 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
2369 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
2370 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
2378 wqe, psn, pmtu);
2385 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
2432 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
2434 wqe, psn, pmtu);
2441 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
2475 rvt_send_complete(qp, wqe, status);
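
Finally, restart_sge() (lines 906, 951, 2378, 2434) repositions the gather state delta_psn(psn, wqe->psn) * pmtu bytes into the WQE and returns what is left to transfer, which is how both retransmit timeouts and read-response continuation resume mid-message. A flat-buffer model; the real helper walks the SGE list.

#include <stdio.h>

struct wqe { unsigned psn; unsigned length; };

static unsigned restart_sge(unsigned *offset, const struct wqe *w,
			    unsigned psn, unsigned pmtu)
{
	*offset = (psn - w->psn) * pmtu;	/* skip already-sent bytes */
	return w->length - *offset;		/* bytes still to transfer */
}

int main(void)
{
	struct wqe w = { .psn = 100, .length = 32768 };
	unsigned offset;

	printf("s_len = %u (offset %u)\n",
	       restart_sge(&offset, &w, 102, 4096), offset); /* 24576, 8192 */
	return 0;
}
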