Lines matching "hw" (full-word match) in the iavf admin queue implementation, drivers/net/ethernet/intel/iavf/iavf_adminq.c
12 * @hw: pointer to the hardware structure
16 static void iavf_adminq_init_regs(struct iavf_hw *hw) in iavf_adminq_init_regs() argument
19 hw->aq.asq.tail = IAVF_VF_ATQT1; in iavf_adminq_init_regs()
20 hw->aq.asq.head = IAVF_VF_ATQH1; in iavf_adminq_init_regs()
21 hw->aq.asq.len = IAVF_VF_ATQLEN1; in iavf_adminq_init_regs()
22 hw->aq.asq.bal = IAVF_VF_ATQBAL1; in iavf_adminq_init_regs()
23 hw->aq.asq.bah = IAVF_VF_ATQBAH1; in iavf_adminq_init_regs()
24 hw->aq.arq.tail = IAVF_VF_ARQT1; in iavf_adminq_init_regs()
25 hw->aq.arq.head = IAVF_VF_ARQH1; in iavf_adminq_init_regs()
26 hw->aq.arq.len = IAVF_VF_ARQLEN1; in iavf_adminq_init_regs()
27 hw->aq.arq.bal = IAVF_VF_ARQBAL1; in iavf_adminq_init_regs()
28 hw->aq.arq.bah = IAVF_VF_ARQBAH1; in iavf_adminq_init_regs()
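
The iavf_adminq_init_regs() matches above show the driver caching the VF admin queue register offsets rather than values: the send queue (asq, backed by the ATQ registers) and the receive queue (arq) each record their head, tail, length and base-address registers so later code can program them generically through wr32()/rd32(). A minimal sketch of the cached fields, assuming a layout along the lines of struct iavf_adminq_ring (the field subset shown here is illustrative; the real ring structure also carries descriptor and buffer state):

    #include <linux/types.h>

    /* Illustrative subset: the five register offsets recorded by
     * iavf_adminq_init_regs(). For the ASQ these are the IAVF_VF_ATQ*1
     * registers, for the ARQ the IAVF_VF_ARQ*1 registers. */
    struct adminq_ring_regs_sketch {
            u32 head;   /* hardware-advanced index register            */
            u32 tail;   /* software-written index register             */
            u32 len;    /* ring length plus enable/error status bits   */
            u32 bal;    /* descriptor ring DMA base address, low bits  */
            u32 bah;    /* descriptor ring DMA base address, high bits */
    };
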
33 * @hw: pointer to the hardware structure
35 static enum iavf_status iavf_alloc_adminq_asq_ring(struct iavf_hw *hw) in iavf_alloc_adminq_asq_ring() argument
39 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in iavf_alloc_adminq_asq_ring()
41 (hw->aq.num_asq_entries * in iavf_alloc_adminq_asq_ring()
47 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in iavf_alloc_adminq_asq_ring()
48 (hw->aq.num_asq_entries * in iavf_alloc_adminq_asq_ring()
51 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_alloc_adminq_asq_ring()
60 * @hw: pointer to the hardware structure
62 static enum iavf_status iavf_alloc_adminq_arq_ring(struct iavf_hw *hw) in iavf_alloc_adminq_arq_ring() argument
66 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in iavf_alloc_adminq_arq_ring()
68 (hw->aq.num_arq_entries * in iavf_alloc_adminq_arq_ring()
77 * @hw: pointer to the hardware structure
82 static void iavf_free_adminq_asq(struct iavf_hw *hw) in iavf_free_adminq_asq() argument
84 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_free_adminq_asq()
89 * @hw: pointer to the hardware structure
94 static void iavf_free_adminq_arq(struct iavf_hw *hw) in iavf_free_adminq_arq() argument
96 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf); in iavf_free_adminq_arq()
101 * @hw: pointer to the hardware structure
103 static enum iavf_status iavf_alloc_arq_bufs(struct iavf_hw *hw) in iavf_alloc_arq_bufs() argument
115 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.arq.dma_head, in iavf_alloc_arq_bufs()
116 (hw->aq.num_arq_entries * in iavf_alloc_arq_bufs()
120 hw->aq.arq.r.arq_bi = (struct iavf_dma_mem *)hw->aq.arq.dma_head.va; in iavf_alloc_arq_bufs()
123 for (i = 0; i < hw->aq.num_arq_entries; i++) { in iavf_alloc_arq_bufs()
124 bi = &hw->aq.arq.r.arq_bi[i]; in iavf_alloc_arq_bufs()
125 ret_code = iavf_allocate_dma_mem(hw, bi, in iavf_alloc_arq_bufs()
127 hw->aq.arq_buf_size, in iavf_alloc_arq_bufs()
133 desc = IAVF_ADMINQ_DESC(hw->aq.arq, i); in iavf_alloc_arq_bufs()
136 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF) in iavf_alloc_arq_bufs()
161 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in iavf_alloc_arq_bufs()
162 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head); in iavf_alloc_arq_bufs()
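
The iavf_alloc_arq_bufs() matches show the overall shape: allocate the dma_head pointer array, allocate one DMA buffer per ring entry, and unwind everything already allocated if a step fails. What the excerpt largely elides is the per-entry descriptor setup that makes each receive slot point at its buffer so the PF/firmware can post messages into it. A hedged reconstruction of that loop body, following the usual Intel admin-queue descriptor layout (field and flag names beyond those visible above are assumptions):

    /* Sketch of the initialization after iavf_allocate_dma_mem() succeeds
     * for entry i; 'bi' is the buffer info, 'desc' the ring descriptor. */
    desc = IAVF_ADMINQ_DESC(hw->aq.arq, i);

    desc->flags = cpu_to_le16(IAVF_AQ_FLAG_BUF);
    if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF)
            desc->flags |= cpu_to_le16(IAVF_AQ_FLAG_LB);
    desc->opcode = 0;                       /* filled by firmware on receive */
    desc->datalen = cpu_to_le16((u16)bi->size);
    desc->retval = 0;
    desc->cookie_high = 0;
    desc->cookie_low = 0;
    desc->params.external.addr_high = cpu_to_le32(upper_32_bits(bi->pa));
    desc->params.external.addr_low  = cpu_to_le32(lower_32_bits(bi->pa));
    desc->params.external.param0 = 0;
    desc->params.external.param1 = 0;
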
169 * @hw: pointer to the hardware structure
171 static enum iavf_status iavf_alloc_asq_bufs(struct iavf_hw *hw) in iavf_alloc_asq_bufs() argument
178 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.dma_head, in iavf_alloc_asq_bufs()
179 (hw->aq.num_asq_entries * in iavf_alloc_asq_bufs()
183 hw->aq.asq.r.asq_bi = (struct iavf_dma_mem *)hw->aq.asq.dma_head.va; in iavf_alloc_asq_bufs()
186 for (i = 0; i < hw->aq.num_asq_entries; i++) { in iavf_alloc_asq_bufs()
187 bi = &hw->aq.asq.r.asq_bi[i]; in iavf_alloc_asq_bufs()
188 ret_code = iavf_allocate_dma_mem(hw, bi, in iavf_alloc_asq_bufs()
190 hw->aq.asq_buf_size, in iavf_alloc_asq_bufs()
202 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in iavf_alloc_asq_bufs()
203 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head); in iavf_alloc_asq_bufs()
210 * @hw: pointer to the hardware structure
212 static void iavf_free_arq_bufs(struct iavf_hw *hw) in iavf_free_arq_bufs() argument
217 for (i = 0; i < hw->aq.num_arq_entries; i++) in iavf_free_arq_bufs()
218 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in iavf_free_arq_bufs()
221 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf); in iavf_free_arq_bufs()
224 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head); in iavf_free_arq_bufs()
229 * @hw: pointer to the hardware structure
231 static void iavf_free_asq_bufs(struct iavf_hw *hw) in iavf_free_asq_bufs() argument
236 for (i = 0; i < hw->aq.num_asq_entries; i++) in iavf_free_asq_bufs()
237 if (hw->aq.asq.r.asq_bi[i].pa) in iavf_free_asq_bufs()
238 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in iavf_free_asq_bufs()
241 iavf_free_virt_mem(hw, &hw->aq.asq.cmd_buf); in iavf_free_asq_bufs()
244 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_free_asq_bufs()
247 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head); in iavf_free_asq_bufs()
252 * @hw: pointer to the hardware structure
256 static enum iavf_status iavf_config_asq_regs(struct iavf_hw *hw) in iavf_config_asq_regs() argument
262 wr32(hw, hw->aq.asq.head, 0); in iavf_config_asq_regs()
263 wr32(hw, hw->aq.asq.tail, 0); in iavf_config_asq_regs()
266 wr32(hw, hw->aq.asq.len, (hw->aq.num_asq_entries | in iavf_config_asq_regs()
268 wr32(hw, hw->aq.asq.bal, lower_32_bits(hw->aq.asq.desc_buf.pa)); in iavf_config_asq_regs()
269 wr32(hw, hw->aq.asq.bah, upper_32_bits(hw->aq.asq.desc_buf.pa)); in iavf_config_asq_regs()
272 reg = rd32(hw, hw->aq.asq.bal); in iavf_config_asq_regs()
273 if (reg != lower_32_bits(hw->aq.asq.desc_buf.pa)) in iavf_config_asq_regs()
281 * @hw: pointer to the hardware structure
285 static enum iavf_status iavf_config_arq_regs(struct iavf_hw *hw) in iavf_config_arq_regs() argument
291 wr32(hw, hw->aq.arq.head, 0); in iavf_config_arq_regs()
292 wr32(hw, hw->aq.arq.tail, 0); in iavf_config_arq_regs()
295 wr32(hw, hw->aq.arq.len, (hw->aq.num_arq_entries | in iavf_config_arq_regs()
297 wr32(hw, hw->aq.arq.bal, lower_32_bits(hw->aq.arq.desc_buf.pa)); in iavf_config_arq_regs()
298 wr32(hw, hw->aq.arq.bah, upper_32_bits(hw->aq.arq.desc_buf.pa)); in iavf_config_arq_regs()
300 /* Update tail in the HW to post pre-allocated buffers */ in iavf_config_arq_regs()
301 wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); in iavf_config_arq_regs()
304 reg = rd32(hw, hw->aq.arq.bal); in iavf_config_arq_regs()
305 if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa)) in iavf_config_arq_regs()
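
iavf_config_arq_regs() mirrors the ASQ variant, with one extra step visible above: after programming the base address it writes the tail register to num_arq_entries - 1, handing every pre-allocated receive buffer to the hardware in one go. Two details the excerpt leaves out are the enable bit OR-ed into the length register and the error path of the BAL read-back check; a hedged sketch of those pieces (the mask and status names are the ones this driver family normally defines):

    enum iavf_status ret_code = 0;
    u32 reg;

    /* ring size combined with the enable bit turns the queue on */
    wr32(hw, hw->aq.arq.len, (hw->aq.num_arq_entries |
                              IAVF_VF_ARQLEN1_ARQENABLE_MASK));

    /* read one base-address register back to confirm the write took */
    reg = rd32(hw, hw->aq.arq.bal);
    if (reg != lower_32_bits(hw->aq.arq.desc_buf.pa))
            ret_code = IAVF_ERR_ADMIN_QUEUE_ERROR;

    return ret_code;
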
313 * @hw: pointer to the hardware structure
317 * in the hw->aq structure:
318 * - hw->aq.num_asq_entries
319 * - hw->aq.asq_buf_size
324 static enum iavf_status iavf_init_asq(struct iavf_hw *hw) in iavf_init_asq() argument
328 if (hw->aq.asq.count > 0) { in iavf_init_asq()
335 if ((hw->aq.num_asq_entries == 0) || in iavf_init_asq()
336 (hw->aq.asq_buf_size == 0)) { in iavf_init_asq()
341 hw->aq.asq.next_to_use = 0; in iavf_init_asq()
342 hw->aq.asq.next_to_clean = 0; in iavf_init_asq()
345 ret_code = iavf_alloc_adminq_asq_ring(hw); in iavf_init_asq()
350 ret_code = iavf_alloc_asq_bufs(hw); in iavf_init_asq()
355 ret_code = iavf_config_asq_regs(hw); in iavf_init_asq()
360 hw->aq.asq.count = hw->aq.num_asq_entries; in iavf_init_asq()
364 iavf_free_adminq_asq(hw); in iavf_init_asq()
372 * @hw: pointer to the hardware structure
376 * in the hw->aq structure:
377 * - hw->aq.num_arq_entries
378 * - hw->aq.arq_buf_size
383 static enum iavf_status iavf_init_arq(struct iavf_hw *hw) in iavf_init_arq() argument
387 if (hw->aq.arq.count > 0) { in iavf_init_arq()
394 if ((hw->aq.num_arq_entries == 0) || in iavf_init_arq()
395 (hw->aq.arq_buf_size == 0)) { in iavf_init_arq()
400 hw->aq.arq.next_to_use = 0; in iavf_init_arq()
401 hw->aq.arq.next_to_clean = 0; in iavf_init_arq()
404 ret_code = iavf_alloc_adminq_arq_ring(hw); in iavf_init_arq()
409 ret_code = iavf_alloc_arq_bufs(hw); in iavf_init_arq()
414 ret_code = iavf_config_arq_regs(hw); in iavf_init_arq()
419 hw->aq.arq.count = hw->aq.num_arq_entries; in iavf_init_arq()
423 iavf_free_adminq_arq(hw); in iavf_init_arq()
431 * @hw: pointer to the hardware structure
435 static enum iavf_status iavf_shutdown_asq(struct iavf_hw *hw) in iavf_shutdown_asq() argument
439 mutex_lock(&hw->aq.asq_mutex); in iavf_shutdown_asq()
441 if (hw->aq.asq.count == 0) { in iavf_shutdown_asq()
447 wr32(hw, hw->aq.asq.head, 0); in iavf_shutdown_asq()
448 wr32(hw, hw->aq.asq.tail, 0); in iavf_shutdown_asq()
449 wr32(hw, hw->aq.asq.len, 0); in iavf_shutdown_asq()
450 wr32(hw, hw->aq.asq.bal, 0); in iavf_shutdown_asq()
451 wr32(hw, hw->aq.asq.bah, 0); in iavf_shutdown_asq()
453 hw->aq.asq.count = 0; /* to indicate uninitialized queue */ in iavf_shutdown_asq()
456 iavf_free_asq_bufs(hw); in iavf_shutdown_asq()
459 mutex_unlock(&hw->aq.asq_mutex); in iavf_shutdown_asq()
465 * @hw: pointer to the hardware structure
469 static enum iavf_status iavf_shutdown_arq(struct iavf_hw *hw) in iavf_shutdown_arq() argument
473 mutex_lock(&hw->aq.arq_mutex); in iavf_shutdown_arq()
475 if (hw->aq.arq.count == 0) { in iavf_shutdown_arq()
481 wr32(hw, hw->aq.arq.head, 0); in iavf_shutdown_arq()
482 wr32(hw, hw->aq.arq.tail, 0); in iavf_shutdown_arq()
483 wr32(hw, hw->aq.arq.len, 0); in iavf_shutdown_arq()
484 wr32(hw, hw->aq.arq.bal, 0); in iavf_shutdown_arq()
485 wr32(hw, hw->aq.arq.bah, 0); in iavf_shutdown_arq()
487 hw->aq.arq.count = 0; /* to indicate uninitialized queue */ in iavf_shutdown_arq()
490 iavf_free_arq_bufs(hw); in iavf_shutdown_arq()
493 mutex_unlock(&hw->aq.arq_mutex); in iavf_shutdown_arq()
499 * @hw: pointer to the hardware structure
502 * in the hw->aq structure:
503 * - hw->aq.num_asq_entries
504 * - hw->aq.num_arq_entries
505 * - hw->aq.arq_buf_size
506 * - hw->aq.asq_buf_size
508 enum iavf_status iavf_init_adminq(struct iavf_hw *hw) in iavf_init_adminq() argument
513 if ((hw->aq.num_arq_entries == 0) || in iavf_init_adminq()
514 (hw->aq.num_asq_entries == 0) || in iavf_init_adminq()
515 (hw->aq.arq_buf_size == 0) || in iavf_init_adminq()
516 (hw->aq.asq_buf_size == 0)) { in iavf_init_adminq()
522 iavf_adminq_init_regs(hw); in iavf_init_adminq()
525 hw->aq.asq_cmd_timeout = IAVF_ASQ_CMD_TIMEOUT; in iavf_init_adminq()
528 ret_code = iavf_init_asq(hw); in iavf_init_adminq()
533 ret_code = iavf_init_arq(hw); in iavf_init_adminq()
541 iavf_shutdown_asq(hw); in iavf_init_adminq()
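
The iavf_init_adminq() fragments above list the four hw->aq fields a caller must populate before invoking it; the function validates them, maps the registers, sets the command timeout, and then brings up the ASQ followed by the ARQ, tearing the ASQ back down if the ARQ fails. A hedged sketch of the caller-side setup, modeled on how a probe/init path would typically size the queues (IAVF_AQ_LEN and IAVF_MAX_AQ_BUF_SIZE are the constants this driver normally uses for this; the surrounding variables are illustrative):

    /* Size both admin queues before calling iavf_init_adminq(); all four
     * fields are checked at the top of that function. */
    hw->aq.num_arq_entries = IAVF_AQ_LEN;
    hw->aq.num_asq_entries = IAVF_AQ_LEN;
    hw->aq.arq_buf_size = IAVF_MAX_AQ_BUF_SIZE;
    hw->aq.asq_buf_size = IAVF_MAX_AQ_BUF_SIZE;

    status = iavf_init_adminq(hw);
    if (status)
            dev_err(&pdev->dev, "failed to init admin queue: %d\n", status);
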
550 * @hw: pointer to the hardware structure
552 enum iavf_status iavf_shutdown_adminq(struct iavf_hw *hw) in iavf_shutdown_adminq() argument
556 if (iavf_check_asq_alive(hw)) in iavf_shutdown_adminq()
557 iavf_aq_queue_shutdown(hw, true); in iavf_shutdown_adminq()
559 iavf_shutdown_asq(hw); in iavf_shutdown_adminq()
560 iavf_shutdown_arq(hw); in iavf_shutdown_adminq()
567 * @hw: pointer to the hardware structure
571 static u16 iavf_clean_asq(struct iavf_hw *hw) in iavf_clean_asq() argument
573 struct iavf_adminq_ring *asq = &hw->aq.asq; in iavf_clean_asq()
581 while (rd32(hw, hw->aq.asq.head) != ntc) { in iavf_clean_asq()
582 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_clean_asq()
583 "ntc %d head %d.\n", ntc, rd32(hw, hw->aq.asq.head)); in iavf_clean_asq()
589 cb_func(hw, &desc_cb); in iavf_clean_asq()
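
iavf_clean_asq() advances the software next_to_clean index up to the hardware head, invoking the saved completion callback (cb_func above) for any descriptor that registered one, and returns how many send descriptors are now free so iavf_asq_send_command() knows whether there is room. The free-slot arithmetic is the usual "keep one slot empty" ring calculation; a self-contained model, assuming the IAVF_DESC_UNUSED()-style formula these drivers commonly use:

    #include <stdio.h>

    /* Free send descriptors on a ring of 'count' slots, given the next
     * slot to clean (ntc) and the next slot to use (ntu). One slot is
     * always left empty so ntc == ntu unambiguously means "ring empty". */
    static unsigned int desc_unused(unsigned int ntc, unsigned int ntu,
                                    unsigned int count)
    {
            return ((ntc > ntu) ? 0 : count) + ntc - ntu - 1;
    }

    int main(void)
    {
            printf("%u\n", desc_unused(0, 0, 16));   /* empty ring: 15 free */
            printf("%u\n", desc_unused(3, 10, 16));  /* 7 in flight: 8 free */
            return 0;
    }
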
608 * @hw: pointer to the hw struct
613 bool iavf_asq_done(struct iavf_hw *hw) in iavf_asq_done() argument
618 return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use; in iavf_asq_done()
623 * @hw: pointer to the hw struct
632 enum iavf_status iavf_asq_send_command(struct iavf_hw *hw, in iavf_asq_send_command() argument
646 mutex_lock(&hw->aq.asq_mutex); in iavf_asq_send_command()
648 if (hw->aq.asq.count == 0) { in iavf_asq_send_command()
649 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
655 hw->aq.asq_last_status = IAVF_AQ_RC_OK; in iavf_asq_send_command()
657 val = rd32(hw, hw->aq.asq.head); in iavf_asq_send_command()
658 if (val >= hw->aq.num_asq_entries) { in iavf_asq_send_command()
659 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
665 details = IAVF_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use); in iavf_asq_send_command()
687 if (buff_size > hw->aq.asq_buf_size) { in iavf_asq_send_command()
688 iavf_debug(hw, in iavf_asq_send_command()
697 iavf_debug(hw, in iavf_asq_send_command()
711 if (iavf_clean_asq(hw) == 0) { in iavf_asq_send_command()
712 iavf_debug(hw, in iavf_asq_send_command()
720 desc_on_ring = IAVF_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use); in iavf_asq_send_command()
727 dma_buff = &hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]; in iavf_asq_send_command()
742 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, "AQTX: desc and buffer:\n"); in iavf_asq_send_command()
743 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc_on_ring, in iavf_asq_send_command()
745 (hw->aq.asq.next_to_use)++; in iavf_asq_send_command()
746 if (hw->aq.asq.next_to_use == hw->aq.asq.count) in iavf_asq_send_command()
747 hw->aq.asq.next_to_use = 0; in iavf_asq_send_command()
749 wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); in iavf_asq_send_command()
761 if (iavf_asq_done(hw)) in iavf_asq_send_command()
765 } while (total_delay < hw->aq.asq_cmd_timeout); in iavf_asq_send_command()
769 if (iavf_asq_done(hw)) { in iavf_asq_send_command()
775 iavf_debug(hw, in iavf_asq_send_command()
790 hw->aq.asq_last_status = (enum iavf_admin_queue_err)retval; in iavf_asq_send_command()
793 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
795 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc, buff, buff_size); in iavf_asq_send_command()
804 if (rd32(hw, hw->aq.asq.len) & IAVF_VF_ATQLEN1_ATQCRIT_MASK) { in iavf_asq_send_command()
805 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
809 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
816 mutex_unlock(&hw->aq.asq_mutex); in iavf_asq_send_command()
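
iavf_asq_send_command() serializes on asq_mutex, reclaims completed descriptors with iavf_clean_asq(), copies the caller's descriptor (and optional indirect buffer) into the next ring slot, bumps next_to_use, writes the tail register, and then polls iavf_asq_done() until the firmware's head index reaches that slot or the asq_cmd_timeout budget runs out. A self-contained model of just the completion-poll loop; asq_hw_head() and delay_us() are stand-ins for rd32() on the head register and udelay(), not real iavf API:

    #include <stdbool.h>
    #include <stdio.h>

    static unsigned int fake_head;                 /* models the ATQ head register */
    static unsigned int asq_hw_head(void) { return fake_head; }
    static void delay_us(unsigned int us) { (void)us; fake_head++; /* pretend progress */ }

    /* Poll until the hardware head reaches our slot (command consumed)
     * or the timeout budget is exhausted, mirroring the driver's loop. */
    static bool wait_for_asq_done(unsigned int next_to_use, unsigned int timeout_us)
    {
            unsigned int total_delay = 0;

            do {
                    if (asq_hw_head() == next_to_use)
                            return true;           /* descriptor processed */
                    delay_us(50);
                    total_delay += 50;
            } while (total_delay < timeout_us);

            return false;   /* the driver reports IAVF_ERR_ADMIN_QUEUE_TIMEOUT here */
    }

    int main(void)
    {
            printf("done=%d\n", wait_for_asq_done(3, 1000));
            return 0;
    }
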
837 * @hw: pointer to the hw struct
845 enum iavf_status iavf_clean_arq_element(struct iavf_hw *hw, in iavf_clean_arq_element() argument
849 u16 ntc = hw->aq.arq.next_to_clean; in iavf_clean_arq_element()
862 mutex_lock(&hw->aq.arq_mutex); in iavf_clean_arq_element()
864 if (hw->aq.arq.count == 0) { in iavf_clean_arq_element()
865 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_clean_arq_element()
872 ntu = rd32(hw, hw->aq.arq.head) & IAVF_VF_ARQH1_ARQH_MASK; in iavf_clean_arq_element()
880 desc = IAVF_ADMINQ_DESC(hw->aq.arq, ntc); in iavf_clean_arq_element()
883 hw->aq.arq_last_status = in iavf_clean_arq_element()
888 iavf_debug(hw, in iavf_clean_arq_element()
891 hw->aq.arq_last_status); in iavf_clean_arq_element()
898 memcpy(e->msg_buf, hw->aq.arq.r.arq_bi[desc_idx].va, in iavf_clean_arq_element()
901 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, "AQRX: desc and buffer:\n"); in iavf_clean_arq_element()
902 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc, e->msg_buf, in iavf_clean_arq_element()
903 hw->aq.arq_buf_size); in iavf_clean_arq_element()
909 bi = &hw->aq.arq.r.arq_bi[ntc]; in iavf_clean_arq_element()
913 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF) in iavf_clean_arq_element()
920 wr32(hw, hw->aq.arq.tail, ntc); in iavf_clean_arq_element()
923 if (ntc == hw->aq.num_arq_entries) in iavf_clean_arq_element()
925 hw->aq.arq.next_to_clean = ntc; in iavf_clean_arq_element()
926 hw->aq.arq.next_to_use = ntu; in iavf_clean_arq_element()
931 *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc); in iavf_clean_arq_element()
934 mutex_unlock(&hw->aq.arq_mutex); in iavf_clean_arq_element()
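
iavf_clean_arq_element() hands one received event back to the caller, recycles its buffer by rewriting the descriptor and bumping the ARQ tail, and finally reports how many further events are still waiting. The *pending computation above has to cope with index wraparound; a self-contained model of that arithmetic (unsigned wraparound makes the formula come out right even when software has wrapped past the hardware index):

    #include <stdio.h>

    /* Events still pending on a ring of 'count' slots: ntu is the hardware
     * head (next slot firmware will write), ntc the next slot software will
     * clean. Matches the formula at the end of iavf_clean_arq_element(). */
    static unsigned int arq_pending(unsigned int ntc, unsigned int ntu,
                                    unsigned int count)
    {
            return (ntc > ntu ? count : 0) + (ntu - ntc);
    }

    int main(void)
    {
            printf("%u\n", arq_pending(2, 5, 16));   /* no wrap: 3 pending       */
            printf("%u\n", arq_pending(14, 1, 16));  /* wrapped: still 3 pending */
            return 0;
    }
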