Lines Matching +full:post +full:-

3 * Copyright (c) 2015-2016 Intel Corporation.  All rights reserved.
15 * - Redistributions of source code must retain the above
19 * - Redistributions in binary form must reproduce the above
44 * i40iw_nop_1 - insert a nop wqe and move head. no post work
54 if (!qp->sq_ring.head) in i40iw_nop_1()
57 wqe_idx = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_nop_1()
58 wqe = qp->sq_base[wqe_idx].elem; in i40iw_nop_1()
60 qp->sq_wrtrk_array[wqe_idx].wqe_size = I40IW_QP_WQE_MIN_SIZE; in i40iw_nop_1()
62 peek_head = (qp->sq_ring.head + 1) % qp->sq_ring.size; in i40iw_nop_1()
63 wqe_0 = qp->sq_base[peek_head].elem; in i40iw_nop_1()
65 wqe_0[3] = LS_64(!qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_nop_1()
67 wqe_0[3] = LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_nop_1()
75 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID) | nop_signature++; in i40iw_nop_1()
84 * i40iw_qp_post_wr - post wr to hardware
96 get_64bit_val(qp->shadow_area, 0, &temp); in i40iw_qp_post_wr()
99 sw_sq_head = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_post_wr()
101 if (sw_sq_head > qp->initial_ring.head) { in i40iw_qp_post_wr()
102 if ((hw_sq_tail >= qp->initial_ring.head) && in i40iw_qp_post_wr()
104 writel(qp->qp_id, qp->wqe_alloc_reg); in i40iw_qp_post_wr()
106 } else if (sw_sq_head != qp->initial_ring.head) { in i40iw_qp_post_wr()
107 if ((hw_sq_tail >= qp->initial_ring.head) || in i40iw_qp_post_wr()
109 writel(qp->qp_id, qp->wqe_alloc_reg); in i40iw_qp_post_wr()
114 qp->initial_ring.head = qp->sq_ring.head; in i40iw_qp_post_wr()
118 * i40iw_qp_ring_push_db - ring qp doorbell
124 …set_32bit_val(qp->push_db, 0, LS_32((wqe_idx >> 2), I40E_PFPE_WQEALLOC_WQE_DESC_INDEX) | qp->qp_id… in i40iw_qp_ring_push_db()
125 qp->initial_ring.head = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_ring_push_db()
129 * i40iw_qp_get_next_send_wqe - return next wqe ptr
149 *wqe_idx = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_get_next_send_wqe()
152 qp->swqe_polarity = !qp->swqe_polarity; in i40iw_qp_get_next_send_wqe()
153 wqe_ptr = (uintptr_t)qp->sq_base[*wqe_idx].elem; in i40iw_qp_get_next_send_wqe()
156 nop_wqe_cnt = (u8)(I40IW_QP_WQE_MAX_SIZE - offset) / I40IW_QP_WQE_MIN_SIZE; in i40iw_qp_get_next_send_wqe()
159 I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code); in i40iw_qp_get_next_send_wqe()
164 *wqe_idx = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_get_next_send_wqe()
166 qp->swqe_polarity = !qp->swqe_polarity; in i40iw_qp_get_next_send_wqe()
171 I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code); in i40iw_qp_get_next_send_wqe()
174 *wqe_idx = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_get_next_send_wqe()
176 qp->swqe_polarity = !qp->swqe_polarity; in i40iw_qp_get_next_send_wqe()
178 I40IW_RING_MOVE_HEAD_BY_COUNT(qp->sq_ring, in i40iw_qp_get_next_send_wqe()
183 wqe = qp->sq_base[*wqe_idx].elem; in i40iw_qp_get_next_send_wqe()
185 peek_head = I40IW_RING_GETCURRENT_HEAD(qp->sq_ring); in i40iw_qp_get_next_send_wqe()
186 wqe_0 = qp->sq_base[peek_head].elem; in i40iw_qp_get_next_send_wqe()
189 if (RS_64(wqe_0[3], I40IWQPSQ_VALID) != !qp->swqe_polarity) in i40iw_qp_get_next_send_wqe()
190 wqe_0[3] = LS_64(!qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_qp_get_next_send_wqe()
193 qp->sq_wrtrk_array[*wqe_idx].wrid = wr_id; in i40iw_qp_get_next_send_wqe()
194 qp->sq_wrtrk_array[*wqe_idx].wr_len = total_size; in i40iw_qp_get_next_send_wqe()
195 qp->sq_wrtrk_array[*wqe_idx].wqe_size = wqe_size; in i40iw_qp_get_next_send_wqe()
200 * i40iw_set_fragment - set fragment in wqe
208 set_64bit_val(wqe, offset, LS_64(sge->tag_off, I40IWQPSQ_FRAG_TO)); in i40iw_set_fragment()
210 (LS_64(sge->len, I40IWQPSQ_FRAG_LEN) | in i40iw_set_fragment()
211 LS_64(sge->stag, I40IWQPSQ_FRAG_STAG))); in i40iw_set_fragment()
216 * i40iw_qp_get_next_recv_wqe - get next qp's rcv wqe
225 if (I40IW_RING_FULL_ERR(qp->rq_ring)) in i40iw_qp_get_next_recv_wqe()
228 I40IW_ATOMIC_RING_MOVE_HEAD(qp->rq_ring, *wqe_idx, ret_code); in i40iw_qp_get_next_recv_wqe()
232 qp->rwqe_polarity = !qp->rwqe_polarity; in i40iw_qp_get_next_recv_wqe()
234 wqe = qp->rq_base[*wqe_idx * (qp->rq_wqe_size_multiplier >> 2)].elem; in i40iw_qp_get_next_recv_wqe()
240 * i40iw_rdma_write - rdma write operation
242 * @info: post sq information
243 * @post_sq: flag to post sq
258 op_info = &info->op.rdma_write; in i40iw_rdma_write()
259 if (op_info->num_lo_sges > qp->max_sq_frag_cnt) in i40iw_rdma_write()
262 for (i = 0; i < op_info->num_lo_sges; i++) in i40iw_rdma_write()
263 total_size += op_info->lo_sg_list[i].len; in i40iw_rdma_write()
268 read_fence |= info->read_fence; in i40iw_rdma_write()
270 ret_code = i40iw_fragcnt_to_wqesize_sq(op_info->num_lo_sges, &wqe_size); in i40iw_rdma_write()
274 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, total_size, info->wr_id); in i40iw_rdma_write()
278 LS_64(op_info->rem_addr.tag_off, I40IWQPSQ_FRAG_TO)); in i40iw_rdma_write()
279 if (!op_info->rem_addr.stag) in i40iw_rdma_write()
282 header = LS_64(op_info->rem_addr.stag, I40IWQPSQ_REMSTAG) | in i40iw_rdma_write()
284 LS_64((op_info->num_lo_sges > 1 ? (op_info->num_lo_sges - 1) : 0), I40IWQPSQ_ADDFRAGCNT) | in i40iw_rdma_write()
286 LS_64(info->local_fence, I40IWQPSQ_LOCALFENCE) | in i40iw_rdma_write()
287 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_rdma_write()
288 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_rdma_write()
290 i40iw_set_fragment(wqe, 0, op_info->lo_sg_list); in i40iw_rdma_write()
292 for (i = 1, byte_off = 32; i < op_info->num_lo_sges; i++) { in i40iw_rdma_write()
293 i40iw_set_fragment(wqe, byte_off, &op_info->lo_sg_list[i]); in i40iw_rdma_write()
308 * i40iw_rdma_read - rdma read command
310 * @info: post sq information
312 * @post_sq: flag to post sq
327 op_info = &info->op.rdma_read; in i40iw_rdma_read()
331 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, op_info->lo_addr.len, info->wr_id); in i40iw_rdma_read()
334 local_fence |= info->local_fence; in i40iw_rdma_read()
336 set_64bit_val(wqe, 16, LS_64(op_info->rem_addr.tag_off, I40IWQPSQ_FRAG_TO)); in i40iw_rdma_read()
337 header = LS_64(op_info->rem_addr.stag, I40IWQPSQ_REMSTAG) | in i40iw_rdma_read()
339 LS_64(info->read_fence, I40IWQPSQ_READFENCE) | in i40iw_rdma_read()
341 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_rdma_read()
342 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_rdma_read()
344 i40iw_set_fragment(wqe, 0, &op_info->lo_addr); in i40iw_rdma_read()
356 * i40iw_send - rdma send command
358 * @info: post sq information
360 * @post_sq: flag to post sq
375 op_info = &info->op.send; in i40iw_send()
376 if (qp->max_sq_frag_cnt < op_info->num_sges) in i40iw_send()
379 for (i = 0; i < op_info->num_sges; i++) in i40iw_send()
380 total_size += op_info->sg_list[i].len; in i40iw_send()
381 ret_code = i40iw_fragcnt_to_wqesize_sq(op_info->num_sges, &wqe_size); in i40iw_send()
385 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, total_size, info->wr_id); in i40iw_send()
389 read_fence |= info->read_fence; in i40iw_send()
392 LS_64(info->op_type, I40IWQPSQ_OPCODE) | in i40iw_send()
393 LS_64((op_info->num_sges > 1 ? (op_info->num_sges - 1) : 0), in i40iw_send()
396 LS_64(info->local_fence, I40IWQPSQ_LOCALFENCE) | in i40iw_send()
397 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_send()
398 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_send()
400 i40iw_set_fragment(wqe, 0, op_info->sg_list); in i40iw_send()
402 for (i = 1, byte_off = 32; i < op_info->num_sges; i++) { in i40iw_send()
403 i40iw_set_fragment(wqe, byte_off, &op_info->sg_list[i]); in i40iw_send()
417 * i40iw_inline_rdma_write - inline rdma write operation
419 * @info: post sq information
420 * @post_sq: flag to post sq
436 op_info = &info->op.inline_rdma_write; in i40iw_inline_rdma_write()
437 if (op_info->len > I40IW_MAX_INLINE_DATA_SIZE) in i40iw_inline_rdma_write()
440 ret_code = i40iw_inline_data_size_to_wqesize(op_info->len, &wqe_size); in i40iw_inline_rdma_write()
444 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, op_info->len, info->wr_id); in i40iw_inline_rdma_write()
448 read_fence |= info->read_fence; in i40iw_inline_rdma_write()
450 LS_64(op_info->rem_addr.tag_off, I40IWQPSQ_FRAG_TO)); in i40iw_inline_rdma_write()
452 header = LS_64(op_info->rem_addr.stag, I40IWQPSQ_REMSTAG) | in i40iw_inline_rdma_write()
454 LS_64(op_info->len, I40IWQPSQ_INLINEDATALEN) | in i40iw_inline_rdma_write()
456 LS_64((qp->push_db ? 1 : 0), I40IWQPSQ_PUSHWQE) | in i40iw_inline_rdma_write()
458 LS_64(info->local_fence, I40IWQPSQ_LOCALFENCE) | in i40iw_inline_rdma_write()
459 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_inline_rdma_write()
460 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_inline_rdma_write()
463 src = (u8 *)(op_info->data); in i40iw_inline_rdma_write()
465 if (op_info->len <= 16) { in i40iw_inline_rdma_write()
466 memcpy(dest, src, op_info->len); in i40iw_inline_rdma_write()
471 memcpy(dest, src, op_info->len - 16); in i40iw_inline_rdma_write()
478 if (qp->push_db) { in i40iw_inline_rdma_write()
479 push = (u64 *)((uintptr_t)qp->push_wqe + (wqe_idx & 0x3) * 0x20); in i40iw_inline_rdma_write()
480 memcpy(push, wqe, (op_info->len > 16) ? op_info->len + 16 : 32); in i40iw_inline_rdma_write()
491 * i40iw_inline_send - inline send operation
493 * @info: post sq information
495 * @post_sq: flag to post sq
512 op_info = &info->op.inline_send; in i40iw_inline_send()
513 if (op_info->len > I40IW_MAX_INLINE_DATA_SIZE) in i40iw_inline_send()
516 ret_code = i40iw_inline_data_size_to_wqesize(op_info->len, &wqe_size); in i40iw_inline_send()
520 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, wqe_size, op_info->len, info->wr_id); in i40iw_inline_send()
524 read_fence |= info->read_fence; in i40iw_inline_send()
526 LS_64(info->op_type, I40IWQPSQ_OPCODE) | in i40iw_inline_send()
527 LS_64(op_info->len, I40IWQPSQ_INLINEDATALEN) | in i40iw_inline_send()
529 LS_64((qp->push_db ? 1 : 0), I40IWQPSQ_PUSHWQE) | in i40iw_inline_send()
531 LS_64(info->local_fence, I40IWQPSQ_LOCALFENCE) | in i40iw_inline_send()
532 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_inline_send()
533 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_inline_send()
536 src = (u8 *)(op_info->data); in i40iw_inline_send()
538 if (op_info->len <= 16) { in i40iw_inline_send()
539 memcpy(dest, src, op_info->len); in i40iw_inline_send()
544 memcpy(dest, src, op_info->len - 16); in i40iw_inline_send()
551 if (qp->push_db) { in i40iw_inline_send()
552 push = (u64 *)((uintptr_t)qp->push_wqe + (wqe_idx & 0x3) * 0x20); in i40iw_inline_send()
553 memcpy(push, wqe, (op_info->len > 16) ? op_info->len + 16 : 32); in i40iw_inline_send()
564 * i40iw_stag_local_invalidate - stag invalidate operation
566 * @info: post sq information
567 * @post_sq: flag to post sq
579 op_info = &info->op.inv_local_stag; in i40iw_stag_local_invalidate()
580 local_fence = info->local_fence; in i40iw_stag_local_invalidate()
582 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, I40IW_QP_WQE_MIN_SIZE, 0, info->wr_id); in i40iw_stag_local_invalidate()
587 LS_64(op_info->target_stag, I40IWQPSQ_LOCSTAG)); in i40iw_stag_local_invalidate()
590 LS_64(info->read_fence, I40IWQPSQ_READFENCE) | in i40iw_stag_local_invalidate()
592 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_stag_local_invalidate()
593 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_stag_local_invalidate()
606 * i40iw_mw_bind - Memory Window bind operation
608 * @info: post sq information
609 * @post_sq: flag to post sq
621 op_info = &info->op.bind_window; in i40iw_mw_bind()
623 local_fence |= info->local_fence; in i40iw_mw_bind()
624 wqe = i40iw_qp_get_next_send_wqe(qp, &wqe_idx, I40IW_QP_WQE_MIN_SIZE, 0, info->wr_id); in i40iw_mw_bind()
627 set_64bit_val(wqe, 0, (uintptr_t)op_info->va); in i40iw_mw_bind()
629 LS_64(op_info->mr_stag, I40IWQPSQ_PARENTMRSTAG) | in i40iw_mw_bind()
630 LS_64(op_info->mw_stag, I40IWQPSQ_MWSTAG)); in i40iw_mw_bind()
631 set_64bit_val(wqe, 16, op_info->bind_length); in i40iw_mw_bind()
633 LS_64(((op_info->enable_reads << 2) | in i40iw_mw_bind()
634 (op_info->enable_writes << 3)), in i40iw_mw_bind()
636 LS_64((op_info->addressing_type == I40IW_ADDR_TYPE_VA_BASED ? 1 : 0), in i40iw_mw_bind()
638 LS_64(info->read_fence, I40IWQPSQ_READFENCE) | in i40iw_mw_bind()
640 LS_64(info->signaled, I40IWQPSQ_SIGCOMPL) | in i40iw_mw_bind()
641 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_mw_bind()
654 * i40iw_post_receive - post receive wqe
656 * @info: post rq information
665 if (qp->max_rq_frag_cnt < info->num_sges) in i40iw_post_receive()
667 for (i = 0; i < info->num_sges; i++) in i40iw_post_receive()
668 total_size += info->sg_list[i].len; in i40iw_post_receive()
673 qp->rq_wrid_array[wqe_idx] = info->wr_id; in i40iw_post_receive()
676 header = LS_64((info->num_sges > 1 ? (info->num_sges - 1) : 0), in i40iw_post_receive()
678 LS_64(qp->rwqe_polarity, I40IWQPSQ_VALID); in i40iw_post_receive()
680 i40iw_set_fragment(wqe, 0, info->sg_list); in i40iw_post_receive()
682 for (i = 1, byte_off = 32; i < info->num_sges; i++) { in i40iw_post_receive()
683 i40iw_set_fragment(wqe, byte_off, &info->sg_list[i]); in i40iw_post_receive()
695 * i40iw_cq_request_notification - cq notification request (door bell)
708 get_64bit_val(cq->shadow_area, 32, &temp_val); in i40iw_cq_request_notification()
722 set_64bit_val(cq->shadow_area, 32, temp_val); in i40iw_cq_request_notification()
726 writel(cq->cq_id, cq->cqe_alloc_reg); in i40iw_cq_request_notification()
730 * i40iw_cq_post_entries - update tail in shadow memory
737 I40IW_RING_MOVE_TAIL_BY_COUNT(cq->cq_ring, count); in i40iw_cq_post_entries()
738 set_64bit_val(cq->shadow_area, 0, in i40iw_cq_post_entries()
739 I40IW_RING_GETCURRENT_HEAD(cq->cq_ring)); in i40iw_cq_post_entries()
744 * i40iw_cq_poll_completion - get cq completion info
762 if (cq->avoid_mem_cflct) in i40iw_cq_poll_completion()
770 if (polarity != cq->polarity) in i40iw_cq_poll_completion()
774 info->error = (bool)RS_64(qword3, I40IW_CQ_ERROR); in i40iw_cq_poll_completion()
775 info->push_dropped = (bool)RS_64(qword3, I40IWCQ_PSHDROP); in i40iw_cq_poll_completion()
776 if (info->error) { in i40iw_cq_poll_completion()
777 info->comp_status = I40IW_COMPL_STATUS_FLUSHED; in i40iw_cq_poll_completion()
778 info->major_err = (bool)RS_64(qword3, I40IW_CQ_MAJERR); in i40iw_cq_poll_completion()
779 info->minor_err = (bool)RS_64(qword3, I40IW_CQ_MINERR); in i40iw_cq_poll_completion()
781 info->comp_status = I40IW_COMPL_STATUS_SUCCESS; in i40iw_cq_poll_completion()
787 info->tcp_seq_num = (u32)RS_64(qword0, I40IWCQ_TCPSEQNUM); in i40iw_cq_poll_completion()
789 info->qp_id = (u32)RS_64(qword2, I40IWCQ_QPID); in i40iw_cq_poll_completion()
793 info->solicited_event = (bool)RS_64(qword3, I40IWCQ_SOEVENT); in i40iw_cq_poll_completion()
794 info->is_srq = (bool)RS_64(qword3, I40IWCQ_SRQ); in i40iw_cq_poll_completion()
802 info->qp_handle = (i40iw_qp_handle)(unsigned long)qp; in i40iw_cq_poll_completion()
805 array_idx = (wqe_idx * 4) / qp->rq_wqe_size_multiplier; in i40iw_cq_poll_completion()
806 if (info->comp_status == I40IW_COMPL_STATUS_FLUSHED) { in i40iw_cq_poll_completion()
807 info->wr_id = qp->rq_wrid_array[qp->rq_ring.tail]; in i40iw_cq_poll_completion()
808 array_idx = qp->rq_ring.tail; in i40iw_cq_poll_completion()
810 info->wr_id = qp->rq_wrid_array[array_idx]; in i40iw_cq_poll_completion()
813 info->op_type = I40IW_OP_TYPE_REC; in i40iw_cq_poll_completion()
815 info->stag_invalid_set = true; in i40iw_cq_poll_completion()
816 info->inv_stag = (u32)RS_64(qword2, I40IWCQ_INVSTAG); in i40iw_cq_poll_completion()
818 info->stag_invalid_set = false; in i40iw_cq_poll_completion()
820 info->bytes_xfered = (u32)RS_64(qword0, I40IWCQ_PAYLDLEN); in i40iw_cq_poll_completion()
821 I40IW_RING_SET_TAIL(qp->rq_ring, array_idx + 1); in i40iw_cq_poll_completion()
822 pring = &qp->rq_ring; in i40iw_cq_poll_completion()
824 if (qp->first_sq_wq) { in i40iw_cq_poll_completion()
825 qp->first_sq_wq = false; in i40iw_cq_poll_completion()
826 if (!wqe_idx && (qp->sq_ring.head == qp->sq_ring.tail)) { in i40iw_cq_poll_completion()
827 I40IW_RING_MOVE_HEAD_NOCHECK(cq->cq_ring); in i40iw_cq_poll_completion()
828 I40IW_RING_MOVE_TAIL(cq->cq_ring); in i40iw_cq_poll_completion()
829 set_64bit_val(cq->shadow_area, 0, in i40iw_cq_poll_completion()
830 I40IW_RING_GETCURRENT_HEAD(cq->cq_ring)); in i40iw_cq_poll_completion()
836 if (info->comp_status != I40IW_COMPL_STATUS_FLUSHED) { in i40iw_cq_poll_completion()
837 info->wr_id = qp->sq_wrtrk_array[wqe_idx].wrid; in i40iw_cq_poll_completion()
838 info->bytes_xfered = qp->sq_wrtrk_array[wqe_idx].wr_len; in i40iw_cq_poll_completion()
840 info->op_type = (u8)RS_64(qword3, I40IWCQ_OP); in i40iw_cq_poll_completion()
841 sw_wqe = qp->sq_base[wqe_idx].elem; in i40iw_cq_poll_completion()
844 addl_wqes = qp->sq_wrtrk_array[wqe_idx].wqe_size / I40IW_QP_WQE_MIN_SIZE; in i40iw_cq_poll_completion()
845 I40IW_RING_SET_TAIL(qp->sq_ring, (wqe_idx + addl_wqes)); in i40iw_cq_poll_completion()
851 tail = qp->sq_ring.tail; in i40iw_cq_poll_completion()
852 sw_wqe = qp->sq_base[tail].elem; in i40iw_cq_poll_completion()
855 info->op_type = op_type; in i40iw_cq_poll_completion()
856 addl_wqes = qp->sq_wrtrk_array[tail].wqe_size / I40IW_QP_WQE_MIN_SIZE; in i40iw_cq_poll_completion()
857 I40IW_RING_SET_TAIL(qp->sq_ring, (tail + addl_wqes)); in i40iw_cq_poll_completion()
859 info->wr_id = qp->sq_wrtrk_array[tail].wrid; in i40iw_cq_poll_completion()
860 info->bytes_xfered = qp->sq_wrtrk_array[tail].wr_len; in i40iw_cq_poll_completion()
865 pring = &qp->sq_ring; in i40iw_cq_poll_completion()
872 (info->comp_status == I40IW_COMPL_STATUS_FLUSHED)) in i40iw_cq_poll_completion()
877 I40IW_RING_MOVE_HEAD_NOCHECK(cq->cq_ring); in i40iw_cq_poll_completion()
879 if (I40IW_RING_GETCURRENT_HEAD(cq->cq_ring) == 0) in i40iw_cq_poll_completion()
880 cq->polarity ^= 1; in i40iw_cq_poll_completion()
882 I40IW_RING_MOVE_TAIL(cq->cq_ring); in i40iw_cq_poll_completion()
883 set_64bit_val(cq->shadow_area, 0, in i40iw_cq_poll_completion()
884 I40IW_RING_GETCURRENT_HEAD(cq->cq_ring)); in i40iw_cq_poll_completion()
886 if (info->is_srq) in i40iw_cq_poll_completion()
889 qword3 |= LS_64(pring->tail, I40IW_CQ_WQEIDX); in i40iw_cq_poll_completion()
897 * i40iw_get_wqe_shift - get shift count for maximum wqe size
915 * i40iw_get_sqdepth - get SQ depth (quantas)
934 * i40iw_get_rq_depth - get RQ depth (quantas)
979 * i40iw_qp_uk_init - initialize shared qp
997 if (info->max_sq_frag_cnt > I40IW_MAX_WQ_FRAGMENT_COUNT) in i40iw_qp_uk_init()
1000 if (info->max_rq_frag_cnt > I40IW_MAX_WQ_FRAGMENT_COUNT) in i40iw_qp_uk_init()
1002 i40iw_get_wqe_shift(info->max_sq_frag_cnt, info->max_inline_data, &sqshift); in i40iw_qp_uk_init()
1004 qp->sq_base = info->sq; in i40iw_qp_uk_init()
1005 qp->rq_base = info->rq; in i40iw_qp_uk_init()
1006 qp->shadow_area = info->shadow_area; in i40iw_qp_uk_init()
1007 qp->sq_wrtrk_array = info->sq_wrtrk_array; in i40iw_qp_uk_init()
1008 qp->rq_wrid_array = info->rq_wrid_array; in i40iw_qp_uk_init()
1010 qp->wqe_alloc_reg = info->wqe_alloc_reg; in i40iw_qp_uk_init()
1011 qp->qp_id = info->qp_id; in i40iw_qp_uk_init()
1013 qp->sq_size = info->sq_size; in i40iw_qp_uk_init()
1014 qp->push_db = info->push_db; in i40iw_qp_uk_init()
1015 qp->push_wqe = info->push_wqe; in i40iw_qp_uk_init()
1017 qp->max_sq_frag_cnt = info->max_sq_frag_cnt; in i40iw_qp_uk_init()
1018 sq_ring_size = qp->sq_size << sqshift; in i40iw_qp_uk_init()
1020 I40IW_RING_INIT(qp->sq_ring, sq_ring_size); in i40iw_qp_uk_init()
1021 I40IW_RING_INIT(qp->initial_ring, sq_ring_size); in i40iw_qp_uk_init()
1022 I40IW_RING_MOVE_HEAD(qp->sq_ring, ret_code); in i40iw_qp_uk_init()
1023 I40IW_RING_MOVE_TAIL(qp->sq_ring); in i40iw_qp_uk_init()
1024 I40IW_RING_MOVE_HEAD(qp->initial_ring, ret_code); in i40iw_qp_uk_init()
1025 qp->swqe_polarity = 1; in i40iw_qp_uk_init()
1026 qp->first_sq_wq = true; in i40iw_qp_uk_init()
1027 qp->swqe_polarity_deferred = 1; in i40iw_qp_uk_init()
1028 qp->rwqe_polarity = 0; in i40iw_qp_uk_init()
1030 if (!qp->use_srq) { in i40iw_qp_uk_init()
1031 qp->rq_size = info->rq_size; in i40iw_qp_uk_init()
1032 qp->max_rq_frag_cnt = info->max_rq_frag_cnt; in i40iw_qp_uk_init()
1033 I40IW_RING_INIT(qp->rq_ring, qp->rq_size); in i40iw_qp_uk_init()
1034 switch (info->abi_ver) { in i40iw_qp_uk_init()
1036 i40iw_get_wqe_shift(info->max_rq_frag_cnt, 0, &rqshift); in i40iw_qp_uk_init()
1043 qp->rq_wqe_size = rqshift; in i40iw_qp_uk_init()
1044 qp->rq_wqe_size_multiplier = 4 << rqshift; in i40iw_qp_uk_init()
1046 qp->ops = iw_qp_uk_ops; in i40iw_qp_uk_init()
1052 * i40iw_cq_uk_init - initialize shared cq (user and kernel)
1059 if ((info->cq_size < I40IW_MIN_CQ_SIZE) || in i40iw_cq_uk_init()
1060 (info->cq_size > I40IW_MAX_CQ_SIZE)) in i40iw_cq_uk_init()
1062 cq->cq_base = (struct i40iw_cqe *)info->cq_base; in i40iw_cq_uk_init()
1063 cq->cq_id = info->cq_id; in i40iw_cq_uk_init()
1064 cq->cq_size = info->cq_size; in i40iw_cq_uk_init()
1065 cq->cqe_alloc_reg = info->cqe_alloc_reg; in i40iw_cq_uk_init()
1066 cq->shadow_area = info->shadow_area; in i40iw_cq_uk_init()
1067 cq->avoid_mem_cflct = info->avoid_mem_cflct; in i40iw_cq_uk_init()
1069 I40IW_RING_INIT(cq->cq_ring, cq->cq_size); in i40iw_cq_uk_init()
1070 cq->polarity = 1; in i40iw_cq_uk_init()
1071 cq->ops = iw_cq_ops; in i40iw_cq_uk_init()
1077 * i40iw_device_init_uk - setup routines for iwarp shared device
1082 dev->ops_uk = iw_device_uk_ops; in i40iw_device_init_uk()
1086 * i40iw_clean_cq - clean cq entries
1097 cq_head = cq->cq_ring.head; in i40iw_clean_cq()
1098 temp = cq->polarity; in i40iw_clean_cq()
1100 if (cq->avoid_mem_cflct) in i40iw_clean_cq()
1101 cqe = (u64 *)&(((struct i40iw_extended_cqe *)cq->cq_base)[cq_head]); in i40iw_clean_cq()
1103 cqe = (u64 *)&cq->cq_base[cq_head]; in i40iw_clean_cq()
1114 cq_head = (cq_head + 1) % cq->cq_ring.size; in i40iw_clean_cq()
1121 * i40iw_nop - send a nop
1125 * @post_sq: flag to post sq
1144 LS_64(qp->swqe_polarity, I40IWQPSQ_VALID); in i40iw_nop()
1156 * i40iw_fragcnt_to_wqesize_sq - calculate wqe size based on fragment count for SQ
1187 * i40iw_fragcnt_to_wqesize_rq - calculate wqe size based on fragment count for RQ
1216 * i40iw_inline_data_size_to_wqesize - based on inline data, wqe size