Lines matching full:hw in iavf_adminq.c
40 * @hw: pointer to the hardware structure
44 STATIC void iavf_adminq_init_regs(struct iavf_hw *hw) in iavf_adminq_init_regs() argument
47 hw->aq.asq.tail = IAVF_VF_ATQT1; in iavf_adminq_init_regs()
48 hw->aq.asq.head = IAVF_VF_ATQH1; in iavf_adminq_init_regs()
49 hw->aq.asq.len = IAVF_VF_ATQLEN1; in iavf_adminq_init_regs()
50 hw->aq.asq.bal = IAVF_VF_ATQBAL1; in iavf_adminq_init_regs()
51 hw->aq.asq.bah = IAVF_VF_ATQBAH1; in iavf_adminq_init_regs()
52 hw->aq.arq.tail = IAVF_VF_ARQT1; in iavf_adminq_init_regs()
53 hw->aq.arq.head = IAVF_VF_ARQH1; in iavf_adminq_init_regs()
54 hw->aq.arq.len = IAVF_VF_ARQLEN1; in iavf_adminq_init_regs()
55 hw->aq.arq.bal = IAVF_VF_ARQBAL1; in iavf_adminq_init_regs()
56 hw->aq.arq.bah = IAVF_VF_ARQBAH1; in iavf_adminq_init_regs()
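iavf_adminq_init_regs() does nothing but record the VF admin queue register offsets (the ATQ* registers for the send queue, the ARQ* registers for the receive queue) in hw->aq, so that later code can program the queues through the generic rd32()/wr32() accessors instead of hard-coding register names. A minimal sketch of that pattern, using only fields and accessors that appear in this listing; the function name is illustrative:

    /* Sketch only: once iavf_adminq_init_regs() has cached the offsets,
     * a queue operation is just a write through the cached field. */
    static void example_bump_asq_tail(struct iavf_hw *hw, u16 next_to_use)
    {
            /* hw->aq.asq.tail holds IAVF_VF_ATQT1 after init_regs */
            wr32(hw, hw->aq.asq.tail, next_to_use);
    }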
61 * @hw: pointer to the hardware structure
63 enum iavf_status iavf_alloc_adminq_asq_ring(struct iavf_hw *hw) in iavf_alloc_adminq_asq_ring() argument
67 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.asq.desc_buf, in iavf_alloc_adminq_asq_ring()
69 (hw->aq.num_asq_entries * in iavf_alloc_adminq_asq_ring()
75 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.cmd_buf, in iavf_alloc_adminq_asq_ring()
76 (hw->aq.num_asq_entries * in iavf_alloc_adminq_asq_ring()
79 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_alloc_adminq_asq_ring()
88 * @hw: pointer to the hardware structure
90 enum iavf_status iavf_alloc_adminq_arq_ring(struct iavf_hw *hw) in iavf_alloc_adminq_arq_ring() argument
94 ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf, in iavf_alloc_adminq_arq_ring()
96 (hw->aq.num_arq_entries * in iavf_alloc_adminq_arq_ring()
105 * @hw: pointer to the hardware structure
110 void iavf_free_adminq_asq(struct iavf_hw *hw) in iavf_free_adminq_asq() argument
112 iavf_free_virt_mem(hw, &hw->aq.asq.cmd_buf); in iavf_free_adminq_asq()
113 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_free_adminq_asq()
118 * @hw: pointer to the hardware structure
123 void iavf_free_adminq_arq(struct iavf_hw *hw) in iavf_free_adminq_arq() argument
125 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf); in iavf_free_adminq_arq()
130 * @hw: pointer to the hardware structure
132 STATIC enum iavf_status iavf_alloc_arq_bufs(struct iavf_hw *hw) in iavf_alloc_arq_bufs() argument
144 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.arq.dma_head, in iavf_alloc_arq_bufs()
145 (hw->aq.num_arq_entries * sizeof(struct iavf_dma_mem))); in iavf_alloc_arq_bufs()
148 hw->aq.arq.r.arq_bi = (struct iavf_dma_mem *)hw->aq.arq.dma_head.va; in iavf_alloc_arq_bufs()
151 for (i = 0; i < hw->aq.num_arq_entries; i++) { in iavf_alloc_arq_bufs()
152 bi = &hw->aq.arq.r.arq_bi[i]; in iavf_alloc_arq_bufs()
153 ret_code = iavf_allocate_dma_mem(hw, bi, in iavf_alloc_arq_bufs()
155 hw->aq.arq_buf_size, in iavf_alloc_arq_bufs()
161 desc = IAVF_ADMINQ_DESC(hw->aq.arq, i); in iavf_alloc_arq_bufs()
164 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF) in iavf_alloc_arq_bufs()
189 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in iavf_alloc_arq_bufs()
190 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head); in iavf_alloc_arq_bufs()
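The allocation loop in iavf_alloc_arq_bufs() grabs one DMA buffer per ARQ descriptor and, if any allocation fails, unwinds every buffer allocated so far before releasing the dma_head tracking array (source lines 189-190 above). A generic, self-contained sketch of that allocate-then-unwind pattern; the names and plain malloc()/free() calls are illustrative, not the driver's:

    #include <stdlib.h>

    /* Illustrative stand-in for the unwind pattern used by
     * iavf_alloc_arq_bufs()/iavf_alloc_asq_bufs(): on failure at index i,
     * free entries [0, i) in reverse order, then the tracking array. */
    static int alloc_ring_bufs(void **bufs, int count, size_t buf_size)
    {
            int i;

            for (i = 0; i < count; i++) {
                    bufs[i] = malloc(buf_size);
                    if (!bufs[i])
                            goto unwind;
            }
            return 0;

    unwind:
            while (i--)
                    free(bufs[i]);
            return -1;
    }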
197 * @hw: pointer to the hardware structure
199 STATIC enum iavf_status iavf_alloc_asq_bufs(struct iavf_hw *hw) in iavf_alloc_asq_bufs() argument
206 ret_code = iavf_allocate_virt_mem(hw, &hw->aq.asq.dma_head, in iavf_alloc_asq_bufs()
207 (hw->aq.num_asq_entries * sizeof(struct iavf_dma_mem))); in iavf_alloc_asq_bufs()
210 hw->aq.asq.r.asq_bi = (struct iavf_dma_mem *)hw->aq.asq.dma_head.va; in iavf_alloc_asq_bufs()
213 for (i = 0; i < hw->aq.num_asq_entries; i++) { in iavf_alloc_asq_bufs()
214 bi = &hw->aq.asq.r.asq_bi[i]; in iavf_alloc_asq_bufs()
215 ret_code = iavf_allocate_dma_mem(hw, bi, in iavf_alloc_asq_bufs()
217 hw->aq.asq_buf_size, in iavf_alloc_asq_bufs()
229 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in iavf_alloc_asq_bufs()
230 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head); in iavf_alloc_asq_bufs()
237 * @hw: pointer to the hardware structure
239 STATIC void iavf_free_arq_bufs(struct iavf_hw *hw) in iavf_free_arq_bufs() argument
244 for (i = 0; i < hw->aq.num_arq_entries; i++) in iavf_free_arq_bufs()
245 iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]); in iavf_free_arq_bufs()
248 iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf); in iavf_free_arq_bufs()
251 iavf_free_virt_mem(hw, &hw->aq.arq.dma_head); in iavf_free_arq_bufs()
256 * @hw: pointer to the hardware structure
258 STATIC void iavf_free_asq_bufs(struct iavf_hw *hw) in iavf_free_asq_bufs() argument
263 for (i = 0; i < hw->aq.num_asq_entries; i++) in iavf_free_asq_bufs()
264 if (hw->aq.asq.r.asq_bi[i].pa) in iavf_free_asq_bufs()
265 iavf_free_dma_mem(hw, &hw->aq.asq.r.asq_bi[i]); in iavf_free_asq_bufs()
268 iavf_free_virt_mem(hw, &hw->aq.asq.cmd_buf); in iavf_free_asq_bufs()
271 iavf_free_dma_mem(hw, &hw->aq.asq.desc_buf); in iavf_free_asq_bufs()
274 iavf_free_virt_mem(hw, &hw->aq.asq.dma_head); in iavf_free_asq_bufs()
279 * @hw: pointer to the hardware structure
283 STATIC enum iavf_status iavf_config_asq_regs(struct iavf_hw *hw) in iavf_config_asq_regs() argument
289 wr32(hw, hw->aq.asq.head, 0); in iavf_config_asq_regs()
290 wr32(hw, hw->aq.asq.tail, 0); in iavf_config_asq_regs()
293 wr32(hw, hw->aq.asq.len, (hw->aq.num_asq_entries | in iavf_config_asq_regs()
295 wr32(hw, hw->aq.asq.bal, IAVF_LO_DWORD(hw->aq.asq.desc_buf.pa)); in iavf_config_asq_regs()
296 wr32(hw, hw->aq.asq.bah, IAVF_HI_DWORD(hw->aq.asq.desc_buf.pa)); in iavf_config_asq_regs()
299 reg = rd32(hw, hw->aq.asq.bal); in iavf_config_asq_regs()
300 if (reg != IAVF_LO_DWORD(hw->aq.asq.desc_buf.pa)) in iavf_config_asq_regs()
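iavf_config_asq_regs() (and its ARQ counterpart below) follows the same sequence: clear head and tail, write the ring length ORed with an enable mask that is truncated out of this listing, program the descriptor base address as low/high dwords, then read BAL back to confirm the device latched the address. A hedged sketch of that write-then-verify step; IAVF_SUCCESS and IAVF_ERR_ADMIN_QUEUE_ERROR are assumed to come from the shared iavf status codes, everything else appears in the listing:

    /* Write-then-verify sketch; the function name is illustrative. */
    static enum iavf_status example_verify_asq_base(struct iavf_hw *hw)
    {
            u32 reg;

            wr32(hw, hw->aq.asq.bal, IAVF_LO_DWORD(hw->aq.asq.desc_buf.pa));
            wr32(hw, hw->aq.asq.bah, IAVF_HI_DWORD(hw->aq.asq.desc_buf.pa));

            /* If the read-back does not match, the queue must not be used. */
            reg = rd32(hw, hw->aq.asq.bal);
            if (reg != IAVF_LO_DWORD(hw->aq.asq.desc_buf.pa))
                    return IAVF_ERR_ADMIN_QUEUE_ERROR; /* assumed status */

            return IAVF_SUCCESS; /* assumed status */
    }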
308 * @hw: pointer to the hardware structure
312 STATIC enum iavf_status iavf_config_arq_regs(struct iavf_hw *hw) in iavf_config_arq_regs() argument
318 wr32(hw, hw->aq.arq.head, 0); in iavf_config_arq_regs()
319 wr32(hw, hw->aq.arq.tail, 0); in iavf_config_arq_regs()
322 wr32(hw, hw->aq.arq.len, (hw->aq.num_arq_entries | in iavf_config_arq_regs()
324 wr32(hw, hw->aq.arq.bal, IAVF_LO_DWORD(hw->aq.arq.desc_buf.pa)); in iavf_config_arq_regs()
325 wr32(hw, hw->aq.arq.bah, IAVF_HI_DWORD(hw->aq.arq.desc_buf.pa)); in iavf_config_arq_regs()
327 /* Update tail in the HW to post pre-allocated buffers */ in iavf_config_arq_regs()
328 wr32(hw, hw->aq.arq.tail, hw->aq.num_arq_entries - 1); in iavf_config_arq_regs()
331 reg = rd32(hw, hw->aq.arq.bal); in iavf_config_arq_regs()
332 if (reg != IAVF_LO_DWORD(hw->aq.arq.desc_buf.pa)) in iavf_config_arq_regs()
340 * @hw: pointer to the hardware structure
344 * in the hw->aq structure:
345 * - hw->aq.num_asq_entries
346 * - hw->aq.asq_buf_size
351 enum iavf_status iavf_init_asq(struct iavf_hw *hw) in iavf_init_asq() argument
355 if (hw->aq.asq.count > 0) { in iavf_init_asq()
362 if ((hw->aq.num_asq_entries == 0) || in iavf_init_asq()
363 (hw->aq.asq_buf_size == 0)) { in iavf_init_asq()
368 hw->aq.asq.next_to_use = 0; in iavf_init_asq()
369 hw->aq.asq.next_to_clean = 0; in iavf_init_asq()
372 ret_code = iavf_alloc_adminq_asq_ring(hw); in iavf_init_asq()
377 ret_code = iavf_alloc_asq_bufs(hw); in iavf_init_asq()
382 ret_code = iavf_config_asq_regs(hw); in iavf_init_asq()
387 hw->aq.asq.count = hw->aq.num_asq_entries; in iavf_init_asq()
391 iavf_free_adminq_asq(hw); in iavf_init_asq()
395 iavf_free_asq_bufs(hw); in iavf_init_asq()
403 * @hw: pointer to the hardware structure
407 * in the hw->aq structure:
408 * - hw->aq.num_arq_entries
409 * - hw->aq.arq_buf_size
414 enum iavf_status iavf_init_arq(struct iavf_hw *hw) in iavf_init_arq() argument
418 if (hw->aq.arq.count > 0) { in iavf_init_arq()
425 if ((hw->aq.num_arq_entries == 0) || in iavf_init_arq()
426 (hw->aq.arq_buf_size == 0)) { in iavf_init_arq()
431 hw->aq.arq.next_to_use = 0; in iavf_init_arq()
432 hw->aq.arq.next_to_clean = 0; in iavf_init_arq()
435 ret_code = iavf_alloc_adminq_arq_ring(hw); in iavf_init_arq()
440 ret_code = iavf_alloc_arq_bufs(hw); in iavf_init_arq()
445 ret_code = iavf_config_arq_regs(hw); in iavf_init_arq()
450 hw->aq.arq.count = hw->aq.num_arq_entries; in iavf_init_arq()
454 iavf_free_adminq_arq(hw); in iavf_init_arq()
462 * @hw: pointer to the hardware structure
466 enum iavf_status iavf_shutdown_asq(struct iavf_hw *hw) in iavf_shutdown_asq() argument
470 iavf_acquire_spinlock(&hw->aq.asq_spinlock); in iavf_shutdown_asq()
472 if (hw->aq.asq.count == 0) { in iavf_shutdown_asq()
478 wr32(hw, hw->aq.asq.head, 0); in iavf_shutdown_asq()
479 wr32(hw, hw->aq.asq.tail, 0); in iavf_shutdown_asq()
480 wr32(hw, hw->aq.asq.len, 0); in iavf_shutdown_asq()
481 wr32(hw, hw->aq.asq.bal, 0); in iavf_shutdown_asq()
482 wr32(hw, hw->aq.asq.bah, 0); in iavf_shutdown_asq()
484 hw->aq.asq.count = 0; /* to indicate uninitialized queue */ in iavf_shutdown_asq()
487 iavf_free_asq_bufs(hw); in iavf_shutdown_asq()
490 iavf_release_spinlock(&hw->aq.asq_spinlock); in iavf_shutdown_asq()
496 * @hw: pointer to the hardware structure
500 enum iavf_status iavf_shutdown_arq(struct iavf_hw *hw) in iavf_shutdown_arq() argument
504 iavf_acquire_spinlock(&hw->aq.arq_spinlock); in iavf_shutdown_arq()
506 if (hw->aq.arq.count == 0) { in iavf_shutdown_arq()
512 wr32(hw, hw->aq.arq.head, 0); in iavf_shutdown_arq()
513 wr32(hw, hw->aq.arq.tail, 0); in iavf_shutdown_arq()
514 wr32(hw, hw->aq.arq.len, 0); in iavf_shutdown_arq()
515 wr32(hw, hw->aq.arq.bal, 0); in iavf_shutdown_arq()
516 wr32(hw, hw->aq.arq.bah, 0); in iavf_shutdown_arq()
518 hw->aq.arq.count = 0; /* to indicate uninitialized queue */ in iavf_shutdown_arq()
521 iavf_free_arq_bufs(hw); in iavf_shutdown_arq()
524 iavf_release_spinlock(&hw->aq.arq_spinlock); in iavf_shutdown_arq()
530 * @hw: pointer to the hardware structure
533 * in the hw->aq structure:
534 * - hw->aq.num_asq_entries
535 * - hw->aq.num_arq_entries
536 * - hw->aq.arq_buf_size
537 * - hw->aq.asq_buf_size
539 enum iavf_status iavf_init_adminq(struct iavf_hw *hw) in iavf_init_adminq() argument
544 if ((hw->aq.num_arq_entries == 0) || in iavf_init_adminq()
545 (hw->aq.num_asq_entries == 0) || in iavf_init_adminq()
546 (hw->aq.arq_buf_size == 0) || in iavf_init_adminq()
547 (hw->aq.asq_buf_size == 0)) { in iavf_init_adminq()
551 iavf_init_spinlock(&hw->aq.asq_spinlock); in iavf_init_adminq()
552 iavf_init_spinlock(&hw->aq.arq_spinlock); in iavf_init_adminq()
555 iavf_adminq_init_regs(hw); in iavf_init_adminq()
558 hw->aq.asq_cmd_timeout = IAVF_ASQ_CMD_TIMEOUT; in iavf_init_adminq()
561 ret_code = iavf_init_asq(hw); in iavf_init_adminq()
566 ret_code = iavf_init_arq(hw); in iavf_init_adminq()
574 iavf_shutdown_asq(hw); in iavf_init_adminq()
576 iavf_destroy_spinlock(&hw->aq.asq_spinlock); in iavf_init_adminq()
577 iavf_destroy_spinlock(&hw->aq.arq_spinlock); in iavf_init_adminq()
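iavf_init_adminq() checks the four sizing fields named in its comment, initializes the two spinlocks and the register offsets, sets the default command timeout, then brings up the send queue followed by the receive queue, shutting the ASQ back down and destroying the spinlocks if ARQ setup fails. A hedged caller-side sketch; the entry counts and buffer sizes are illustrative values rather than the driver's defaults, IAVF_SUCCESS is assumed from the shared status codes, and the shared-code headers are assumed to be on the include path:

    /* Caller-side bring-up sketch under the assumptions named above. */
    static enum iavf_status example_adminq_bringup(struct iavf_hw *hw)
    {
            enum iavf_status status;

            /* Fields iavf_init_adminq() requires the driver to set first
             * (see the comment at source lines 533-537 above). */
            hw->aq.num_asq_entries = 256;   /* illustrative */
            hw->aq.num_arq_entries = 256;   /* illustrative */
            hw->aq.asq_buf_size = 4096;     /* illustrative */
            hw->aq.arq_buf_size = 4096;     /* illustrative */

            status = iavf_init_adminq(hw);
            if (status != IAVF_SUCCESS)
                    return status;  /* init_adminq unwinds on failure */

            return IAVF_SUCCESS;
    }

Teardown is the mirror image: iavf_shutdown_adminq() below sends a queue-shutdown request if the ASQ is still alive, shuts down both rings, and destroys the spinlocks.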
585 * @hw: pointer to the hardware structure
587 enum iavf_status iavf_shutdown_adminq(struct iavf_hw *hw) in iavf_shutdown_adminq() argument
591 if (iavf_check_asq_alive(hw)) in iavf_shutdown_adminq()
592 iavf_aq_queue_shutdown(hw, true); in iavf_shutdown_adminq()
594 iavf_shutdown_asq(hw); in iavf_shutdown_adminq()
595 iavf_shutdown_arq(hw); in iavf_shutdown_adminq()
596 iavf_destroy_spinlock(&hw->aq.asq_spinlock); in iavf_shutdown_adminq()
597 iavf_destroy_spinlock(&hw->aq.arq_spinlock); in iavf_shutdown_adminq()
604 * @hw: pointer to the hardware structure
608 u16 iavf_clean_asq(struct iavf_hw *hw) in iavf_clean_asq() argument
610 struct iavf_adminq_ring *asq = &(hw->aq.asq); in iavf_clean_asq()
618 while (rd32(hw, hw->aq.asq.head) != ntc) { in iavf_clean_asq()
619 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_clean_asq()
620 "ntc %d head %d.\n", ntc, rd32(hw, hw->aq.asq.head)); in iavf_clean_asq()
627 cb_func(hw, &desc_cb); in iavf_clean_asq()
645 * @hw: pointer to the hw struct
650 bool iavf_asq_done(struct iavf_hw *hw) in iavf_asq_done() argument
655 return rd32(hw, hw->aq.asq.head) == hw->aq.asq.next_to_use; in iavf_asq_done()
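iavf_asq_done() reports completion by comparing the hardware head pointer against the software next_to_use index; iavf_asq_send_command() polls it in a bounded delay loop governed by hw->aq.asq_cmd_timeout (set to IAVF_ASQ_CMD_TIMEOUT during init). A sketch of that polling shape; example_delay_usecs() is a hypothetical stand-in for the platform's microsecond-delay primitive and the 50-microsecond step is illustrative:

    /* Hypothetical stand-in for the OS-specific delay primitive. */
    static void example_delay_usecs(unsigned int usecs);

    /* Bounded polling sketch mirroring the wait in iavf_asq_send_command(). */
    static bool example_wait_asq_done(struct iavf_hw *hw)
    {
            u32 total_delay = 0;

            do {
                    if (iavf_asq_done(hw))
                            return true;
                    example_delay_usecs(50);   /* illustrative step */
                    total_delay += 50;
            } while (total_delay < hw->aq.asq_cmd_timeout);

            return iavf_asq_done(hw);
    }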
661 * @hw: pointer to the hw struct
670 enum iavf_status iavf_asq_send_command(struct iavf_hw *hw, in iavf_asq_send_command() argument
684 iavf_acquire_spinlock(&hw->aq.asq_spinlock); in iavf_asq_send_command()
686 hw->aq.asq_last_status = IAVF_AQ_RC_OK; in iavf_asq_send_command()
688 if (hw->aq.asq.count == 0) { in iavf_asq_send_command()
689 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
695 val = rd32(hw, hw->aq.asq.head); in iavf_asq_send_command()
696 if (val >= hw->aq.num_asq_entries) { in iavf_asq_send_command()
697 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
703 details = IAVF_ADMINQ_DETAILS(hw->aq.asq, hw->aq.asq.next_to_use); in iavf_asq_send_command()
730 if (buff_size > hw->aq.asq_buf_size) { in iavf_asq_send_command()
731 iavf_debug(hw, in iavf_asq_send_command()
740 iavf_debug(hw, in iavf_asq_send_command()
754 if (iavf_clean_asq(hw) == 0) { in iavf_asq_send_command()
755 iavf_debug(hw, in iavf_asq_send_command()
763 desc_on_ring = IAVF_ADMINQ_DESC(hw->aq.asq, hw->aq.asq.next_to_use); in iavf_asq_send_command()
771 dma_buff = &(hw->aq.asq.r.asq_bi[hw->aq.asq.next_to_use]); in iavf_asq_send_command()
787 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, "AQTX: desc and buffer:\n"); in iavf_asq_send_command()
788 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc_on_ring, in iavf_asq_send_command()
790 (hw->aq.asq.next_to_use)++; in iavf_asq_send_command()
791 if (hw->aq.asq.next_to_use == hw->aq.asq.count) in iavf_asq_send_command()
792 hw->aq.asq.next_to_use = 0; in iavf_asq_send_command()
794 wr32(hw, hw->aq.asq.tail, hw->aq.asq.next_to_use); in iavf_asq_send_command()
806 if (iavf_asq_done(hw)) in iavf_asq_send_command()
810 } while (total_delay < hw->aq.asq_cmd_timeout); in iavf_asq_send_command()
814 if (iavf_asq_done(hw)) { in iavf_asq_send_command()
822 iavf_debug(hw, in iavf_asq_send_command()
837 hw->aq.asq_last_status = (enum iavf_admin_queue_err)retval; in iavf_asq_send_command()
840 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
842 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc, buff, buff_size); in iavf_asq_send_command()
852 if (rd32(hw, hw->aq.asq.len) & IAVF_VF_ATQLEN1_ATQCRIT_MASK) { in iavf_asq_send_command()
853 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
857 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_asq_send_command()
864 iavf_release_spinlock(&hw->aq.asq_spinlock); in iavf_asq_send_command()
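The send path above fills a descriptor slot, optionally attaches an indirect buffer, bumps next_to_use, writes the tail register, and then waits for iavf_asq_done(), recording the firmware return code in hw->aq.asq_last_status. A hedged caller sketch: struct iavf_aq_desc, the five-argument iavf_asq_send_command() signature, the iavf_fill_default_direct_cmd_desc() helper, IAVF_SUCCESS, and IAVF_ERR_ADMIN_QUEUE_ERROR are assumptions taken from the wider iavf shared code, not from the lines above; the opcode and the NULL cmd_details are illustrative:

    /* Caller sketch under the assumptions named above. */
    static enum iavf_status example_send_direct_cmd(struct iavf_hw *hw,
                                                    u16 opcode)
    {
            struct iavf_aq_desc desc;
            enum iavf_status status;

            /* Assumed helper: zeroes the descriptor and sets the opcode. */
            iavf_fill_default_direct_cmd_desc(&desc, opcode);

            /* No indirect buffer for a direct command. */
            status = iavf_asq_send_command(hw, &desc, NULL, 0, NULL);
            if (status != IAVF_SUCCESS)
                    return status;

            /* Firmware-level result is reported separately. */
            if (hw->aq.asq_last_status != IAVF_AQ_RC_OK)
                    return IAVF_ERR_ADMIN_QUEUE_ERROR; /* assumed status */

            return IAVF_SUCCESS;
    }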
887 * @hw: pointer to the hw struct
895 enum iavf_status iavf_clean_arq_element(struct iavf_hw *hw, in iavf_clean_arq_element() argument
900 u16 ntc = hw->aq.arq.next_to_clean; in iavf_clean_arq_element()
912 iavf_acquire_spinlock(&hw->aq.arq_spinlock); in iavf_clean_arq_element()
914 if (hw->aq.arq.count == 0) { in iavf_clean_arq_element()
915 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, in iavf_clean_arq_element()
922 ntu = rd32(hw, hw->aq.arq.head) & IAVF_VF_ARQH1_ARQH_MASK; in iavf_clean_arq_element()
930 desc = IAVF_ADMINQ_DESC(hw->aq.arq, ntc); in iavf_clean_arq_element()
933 hw->aq.arq_last_status = in iavf_clean_arq_element()
938 iavf_debug(hw, in iavf_clean_arq_element()
941 hw->aq.arq_last_status); in iavf_clean_arq_element()
950 hw->aq.arq.r.arq_bi[desc_idx].va, in iavf_clean_arq_element()
953 iavf_debug(hw, IAVF_DEBUG_AQ_MESSAGE, "AQRX: desc and buffer:\n"); in iavf_clean_arq_element()
954 iavf_debug_aq(hw, IAVF_DEBUG_AQ_COMMAND, (void *)desc, e->msg_buf, in iavf_clean_arq_element()
955 hw->aq.arq_buf_size); in iavf_clean_arq_element()
961 bi = &hw->aq.arq.r.arq_bi[ntc]; in iavf_clean_arq_element()
965 if (hw->aq.arq_buf_size > IAVF_AQ_LARGE_BUF) in iavf_clean_arq_element()
972 wr32(hw, hw->aq.arq.tail, ntc); in iavf_clean_arq_element()
975 if (ntc == hw->aq.num_arq_entries) in iavf_clean_arq_element()
977 hw->aq.arq.next_to_clean = ntc; in iavf_clean_arq_element()
978 hw->aq.arq.next_to_use = ntu; in iavf_clean_arq_element()
983 *pending = (ntc > ntu ? hw->aq.arq.count : 0) + (ntu - ntc); in iavf_clean_arq_element()
985 iavf_release_spinlock(&hw->aq.arq_spinlock); in iavf_clean_arq_element()
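iavf_clean_arq_element() copies one received event (descriptor plus message buffer) out of the ring, recycles the DMA buffer back into the descriptor, advances the tail, and reports how many further events are pending. A hedged receive-loop sketch; the iavf_arq_event_info buf_len and desc fields and the exact three-argument signature are assumptions based on the wider iavf shared code (msg_buf and pending appear in the listing), and buffer ownership stays with the caller:

    /* Poll-loop sketch; the caller supplies and owns the message buffer. */
    static void example_drain_arq(struct iavf_hw *hw, u8 *buf, u16 buf_size)
    {
            struct iavf_arq_event_info event;
            enum iavf_status status;
            u16 pending;

            event.buf_len = buf_size;  /* capacity of the caller's buffer */
            event.msg_buf = buf;

            do {
                    pending = 0;
                    status = iavf_clean_arq_element(hw, &event, &pending);
                    if (status != IAVF_SUCCESS)
                            break;  /* e.g. queue empty, nothing cleaned */

                    /* event.desc and event.msg_buf now hold one message;
                     * a real driver would dispatch on the opcode here. */
            } while (pending);
    }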