Lines Matching defs:wqe
238 struct siw_wqe *wqe = tx_wqe(qp);
244 if (unlikely(wqe->wr_status != SIW_WR_IDLE)) {
248 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
250 wqe->wr_status = SIW_WR_QUEUED;
251 wqe->sqe.flags = 0;
252 wqe->sqe.num_sge = 1;
253 wqe->sqe.sge[0].length = 0;
254 wqe->sqe.sge[0].laddr = 0;
255 wqe->sqe.sge[0].lkey = 0;
260 wqe->sqe.rkey = 1;
261 wqe->sqe.raddr = 0;
262 wqe->processed = 0;
265 wqe->sqe.opcode = SIW_OP_WRITE;
269 wqe->sqe.opcode = SIW_OP_READ;
276 siw_read_to_orq(rreq, &wqe->sqe);
286 wqe->wr_status = SIW_WR_IDLE;
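
The matches at 238-286 are apparently the connection-setup path that arms the QP's single transmit WQE: it is taken only if idle, its memory references are cleared, and it is filled in as a zero-length, single-SGE dummy request, either a WRITE or a READ (a READ is additionally recorded via siw_read_to_orq() before the status eventually returns to idle). Below is a minimal standalone sketch of that claim-reset-fill sequence; the types, names and constants are simplified stand-ins modeled on the listing, not the driver's real definitions.

/* Sketch only: hypothetical stand-in types, loosely modeled on the listing. */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define MAX_SGE 6

enum wr_status { WR_IDLE, WR_QUEUED };
enum wr_opcode { OP_WRITE, OP_READ };

struct sge { uint64_t laddr; uint32_t length; uint32_t lkey; };

struct sqe {
	uint8_t opcode;
	uint8_t flags;
	uint8_t num_sge;
	uint32_t rkey;
	uint64_t raddr;
	struct sge sge[MAX_SGE];
};

struct wqe {
	enum wr_status wr_status;
	uint32_t processed;
	void *mem[MAX_SGE];		/* per-SGE memory references, cleared on reuse */
	struct sqe sqe;
};

/* Claim the single TX WQE and turn it into a zero-length dummy
 * WRITE or READ, mirroring the initialization sequence above. */
bool prepare_dummy_wqe(struct wqe *wqe, bool use_read)
{
	if (wqe->wr_status != WR_IDLE)
		return false;			/* TX WQE already in use */

	memset(wqe->mem, 0, sizeof(wqe->mem));
	wqe->wr_status = WR_QUEUED;

	wqe->sqe.flags = 0;
	wqe->sqe.num_sge = 1;			/* one SGE, zero bytes */
	wqe->sqe.sge[0].length = 0;
	wqe->sqe.sge[0].laddr = 0;
	wqe->sqe.sge[0].lkey = 0;
	wqe->sqe.rkey = 1;			/* dummy key for the zero-length transfer */
	wqe->sqe.raddr = 0;
	wqe->processed = 0;

	wqe->sqe.opcode = use_read ? OP_READ : OP_WRITE;
	/* a READ would additionally be saved to the outbound read queue here */
	return true;
}
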
438 struct siw_wqe *wqe = tx_wqe(qp);
456 rreq->ddp_msn = htonl(wqe->sqe.sge[0].length);
458 rreq->ddp_mo = htonl(wqe->processed);
459 rreq->sink_stag = htonl(wqe->sqe.rkey);
460 rreq->sink_to = cpu_to_be64(wqe->sqe.raddr);
461 rreq->read_size = htonl(wqe->sqe.sge[0].length);
462 rreq->source_stag = htonl(wqe->sqe.sge[0].lkey);
464 cpu_to_be64(wqe->sqe.sge[0].laddr);
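
The matches at 438-464 pack fields of the outstanding read WQE into a big-endian read-request header (rreq): 32-bit values go through htonl() and the 64-bit tagged offsets through cpu_to_be64(). A small user-space illustration of that host-to-wire conversion follows; the header layout and names are simplified stand-ins (not the real iwarp_rdma_rreq), and a Linux/glibc environment is assumed for htobe64().

#define _DEFAULT_SOURCE		/* htobe64() from <endian.h> on glibc */
#include <arpa/inet.h>		/* htonl() */
#include <endian.h>
#include <stdint.h>

struct rreq_hdr {		/* all fields big-endian on the wire */
	uint32_t ddp_msn;
	uint32_t ddp_mo;
	uint32_t sink_stag;
	uint64_t sink_to;
	uint32_t read_size;
	uint32_t source_stag;
	uint64_t source_to;
} __attribute__((packed));

/* Convert host-order fields describing an outstanding read into the
 * big-endian header layout, as the listed code does for the real header. */
void pack_rreq(struct rreq_hdr *h,
	       uint32_t msn, uint32_t offset,
	       uint32_t sink_stag, uint64_t sink_to, uint32_t size,
	       uint32_t source_stag, uint64_t source_to)
{
	h->ddp_msn = htonl(msn);
	h->ddp_mo = htonl(offset);
	h->sink_stag = htonl(sink_stag);
	h->sink_to = htobe64(sink_to);		/* stands in for cpu_to_be64() */
	h->read_size = htonl(size);
	h->source_stag = htonl(source_stag);
	h->source_to = htobe64(source_to);
}
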
855 struct siw_wqe *wqe = tx_wqe(qp);
862 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
863 wqe->wr_status = SIW_WR_QUEUED;
866 memcpy(&wqe->sqe, sqe, sizeof(*sqe));
868 if (wqe->sqe.opcode >= SIW_NUM_OPCODES) {
872 if (wqe->sqe.flags & SIW_WQE_INLINE) {
873 if (wqe->sqe.opcode != SIW_OP_SEND &&
874 wqe->sqe.opcode != SIW_OP_WRITE) {
878 if (wqe->sqe.sge[0].length > SIW_MAX_INLINE) {
882 wqe->sqe.sge[0].laddr = (uintptr_t)&wqe->sqe.sge[1];
883 wqe->sqe.sge[0].lkey = 0;
884 wqe->sqe.num_sge = 1;
886 if (wqe->sqe.flags & SIW_WQE_READ_FENCE) {
888 if (unlikely(wqe->sqe.opcode == SIW_OP_READ ||
889 wqe->sqe.opcode ==
903 } else if (wqe->sqe.opcode == SIW_OP_READ ||
904 wqe->sqe.opcode == SIW_OP_READ_LOCAL_INV) {
912 wqe->sqe.num_sge = 1;
922 siw_read_to_orq(rreq, &wqe->sqe);
937 wqe->wr_status = SIW_WR_IDLE;
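
In the 855-937 block the posted SQE is copied into the active TX WQE, its opcode is validated, and inline data gets special treatment: the payload is expected to sit directly behind sge[0] inside the SQE itself, so sge[0].laddr is re-pointed at &sge[1], lkey is zeroed (no registered memory is involved) and num_sge is forced to 1. The self-contained sketch below demonstrates that inline-data convention with hypothetical stand-in types and sizes.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define MAX_SGE 6

struct sge { uint64_t laddr; uint32_t length; uint32_t lkey; };

/* Inline payload capacity: the space occupied by sge[1..MAX_SGE-1]. */
#define MAX_INLINE (sizeof(struct sge) * (MAX_SGE - 1))

struct sqe {
	uint8_t opcode;
	uint8_t flags;			/* would carry an INLINE flag */
	uint8_t num_sge;
	struct sge sge[MAX_SGE];
};

/* Copy an inline payload into the unused tail of the SGE array and
 * point sge[0] at it, mirroring the convention seen above. */
int set_inline(struct sqe *sqe, const void *data, size_t len)
{
	if (len > MAX_INLINE)
		return -1;			/* too large for inline placement */

	memcpy(&sqe->sge[1], data, len);	/* bytes live behind sge[0] */
	sqe->sge[0].laddr = (uintptr_t)&sqe->sge[1];
	sqe->sge[0].length = (uint32_t)len;
	sqe->sge[0].lkey = 0;			/* no memory key needed */
	sqe->num_sge = 1;
	return 0;
}

int main(void)
{
	struct sqe sqe = { 0 };

	if (set_inline(&sqe, "hello", 6) == 0)
		printf("inline payload: %s (%u bytes)\n",
		       (const char *)(uintptr_t)sqe.sge[0].laddr,
		       (unsigned)sqe.sge[0].length);
	return 0;
}
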
951 struct siw_wqe *wqe = tx_wqe(qp);
969 memset(wqe->mem, 0, sizeof(*wqe->mem) * SIW_MAX_SGE);
970 wqe->wr_status = SIW_WR_QUEUED;
973 wqe->sqe.opcode = SIW_OP_READ_RESPONSE;
974 wqe->sqe.flags = 0;
976 wqe->sqe.num_sge = 1;
977 wqe->sqe.sge[0].length = irqe->sge[0].length;
978 wqe->sqe.sge[0].laddr = irqe->sge[0].laddr;
979 wqe->sqe.sge[0].lkey = irqe->sge[0].lkey;
981 wqe->sqe.num_sge = 0;
987 wqe->sqe.sge[1].length = irqe->sge[1].length;
989 wqe->sqe.rkey = irqe->rkey;
990 wqe->sqe.raddr = irqe->raddr;
992 wqe->processed = 0;
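
The 951-992 matches build a READ_RESPONSE WQE from an entry of the inbound read queue (irqe): the local source buffer (laddr/length/lkey) and the requester's sink STag and offset (rkey/raddr) are copied over, and a zero-length read is represented with num_sge = 0. A compact sketch of that translation step, again with simplified stand-in types (the real WQE keeps these fields inside its embedded SQE and SGE array).

#include <stdint.h>
#include <string.h>

enum wr_status { WR_IDLE, WR_QUEUED };
enum wr_opcode { OP_READ_RESPONSE };

struct read_req {		/* stand-in for an inbound-read-queue entry */
	uint64_t laddr;		/* local buffer holding the requested data */
	uint32_t length;
	uint32_t lkey;
	uint32_t rkey;		/* requester's sink STag */
	uint64_t raddr;		/* requester's sink offset */
};

struct resp_wqe {
	enum wr_status wr_status;
	enum wr_opcode opcode;
	uint8_t num_sge;
	uint64_t laddr;
	uint32_t length;
	uint32_t lkey;
	uint32_t rkey;
	uint64_t raddr;
	uint32_t processed;
};

/* Turn a queued inbound read request into an active read response. */
void activate_read_response(struct resp_wqe *wqe, const struct read_req *req)
{
	memset(wqe, 0, sizeof(*wqe));
	wqe->wr_status = WR_QUEUED;
	wqe->opcode = OP_READ_RESPONSE;

	if (req->length) {		/* describe the local source buffer */
		wqe->num_sge = 1;
		wqe->laddr = req->laddr;
		wqe->length = req->length;
		wqe->lkey = req->lkey;
	} else {
		wqe->num_sge = 0;	/* zero-length read response */
	}
	wqe->rkey = req->rkey;		/* where the requester wants the data */
	wqe->raddr = req->raddr;
	wqe->processed = 0;
}
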
1162 struct siw_wqe *wqe = tx_wqe(qp);
1182 if (wqe->wr_status != SIW_WR_IDLE) {
1184 tx_type(wqe), wqe->wr_status);
1186 siw_wqe_put_mem(wqe, tx_type(wqe));
1188 if (tx_type(wqe) != SIW_OP_READ_RESPONSE &&
1189 ((tx_type(wqe) != SIW_OP_READ &&
1190 tx_type(wqe) != SIW_OP_READ_LOCAL_INV) ||
1191 wqe->wr_status == SIW_WR_QUEUED))
1196 siw_sqe_complete(qp, &wqe->sqe, wqe->bytes,
1199 wqe->wr_status = SIW_WR_IDLE;
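
The 1162-1199 group is the send-queue flush: a non-idle TX WQE has its memory references dropped and is completed with an error, except when it is a READ_RESPONSE (no local work request to complete) or a READ/READ_LOCAL_INV that already left the QUEUED state, whose completion is presumably tracked elsewhere (the outbound read queue). The predicate below restates that visible condition in isolation, using stand-in enums.

#include <stdbool.h>

enum wr_status { WR_IDLE, WR_QUEUED, WR_INPROGRESS };
enum wr_opcode { OP_WRITE, OP_SEND, OP_READ, OP_READ_LOCAL_INV, OP_READ_RESPONSE };

/* Decide whether a flushed TX WQE should produce a completion on the
 * send queue, mirroring the condition at 1188-1191: read responses never
 * complete locally, and reads that have already been issued do not
 * complete here either. */
bool sq_flush_needs_completion(enum wr_opcode op, enum wr_status status)
{
	if (op == OP_READ_RESPONSE)
		return false;
	if ((op == OP_READ || op == OP_READ_LOCAL_INV) && status != WR_QUEUED)
		return false;
	return true;
}
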
1237 struct siw_wqe *wqe = &qp->rx_untagged.wqe_active;
1242 if (wqe->wr_status != SIW_WR_IDLE) {
1244 rx_type(wqe), wqe->wr_status);
1246 siw_wqe_put_mem(wqe, rx_type(wqe));
1248 if (rx_type(wqe) == SIW_OP_RECEIVE) {
1249 siw_rqe_complete(qp, &wqe->rqe, wqe->bytes,
1251 } else if (rx_type(wqe) != SIW_OP_READ &&
1252 rx_type(wqe) != SIW_OP_READ_RESPONSE &&
1253 rx_type(wqe) != SIW_OP_WRITE) {
1254 siw_sqe_complete(qp, &wqe->sqe, 0, SIW_WC_WR_FLUSH_ERR);
1256 wqe->wr_status = SIW_WR_IDLE;
1258 wqe = &qp->rx_tagged.wqe_active;
1260 if (wqe->wr_status != SIW_WR_IDLE) {
1261 siw_wqe_put_mem(wqe, rx_type(wqe));
1262 wqe->wr_status = SIW_WR_IDLE;
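
The final group, 1237-1262, flushes the receive side: in the untagged context a plain RECEIVE is completed on the receive queue with SIW_WC_WR_FLUSH_ERR, an in-progress READ/READ_RESPONSE/WRITE is simply dropped, and any remaining untagged operation is completed through the send-queue path; the tagged context only needs its memory references released before going back to idle. A stand-in dispatcher summarizing the untagged branches (enum names are hypothetical):

enum wr_opcode { OP_WRITE, OP_SEND, OP_READ, OP_READ_RESPONSE, OP_RECEIVE };

enum flush_action {
	FLUSH_COMPLETE_RQ,	/* flush-error completion on the receive queue */
	FLUSH_COMPLETE_SQ,	/* completion through the send-queue path */
	FLUSH_DROP,		/* just drop the in-progress processing state */
};

/* Mirror the untagged-context branches of the RQ flush above. */
enum flush_action rq_flush_action(enum wr_opcode op)
{
	if (op == OP_RECEIVE)
		return FLUSH_COMPLETE_RQ;
	if (op == OP_READ || op == OP_READ_RESPONSE || op == OP_WRITE)
		return FLUSH_DROP;	/* no completion generated here */
	return FLUSH_COMPLETE_SQ;
}
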