Searched refs:p_virt (Results 1 – 11 of 11) sorted by relevance

/freebsd/sys/dev/qlnx/qlnxe/
ecore_cxt.c
188 void *p_virt; member
954 if (p_mngr->t2[i].p_virt) in ecore_cxt_src_t2_free()
956 p_mngr->t2[i].p_virt, in ecore_cxt_src_t2_free()
1003 void **p_virt = &p_mngr->t2[i].p_virt; in ecore_cxt_src_t2_alloc() local
1005 *p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, in ecore_cxt_src_t2_alloc()
1008 if (!p_mngr->t2[i].p_virt) { in ecore_cxt_src_t2_alloc()
1012 OSAL_MEM_ZERO(*p_virt, size); in ecore_cxt_src_t2_alloc()
1031 struct src_ent *entries = p_mngr->t2[i].p_virt; in ecore_cxt_src_t2_alloc()
1090 if (p_dma->p_virt) in ecore_ilt_shadow_free()
1092 p_dma->p_virt, in ecore_ilt_shadow_free()
[all …]
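
The ecore_cxt.c hits above show the per-entry alloc/zero/free pattern of the t2 table: each slot carries its own p_virt/p_phys pair, allocation zeroes the buffer, and teardown frees only the slots that were populated. A minimal standalone sketch of that pattern, with malloc()/free() standing in for OSAL_DMA_ALLOC_COHERENT/OSAL_DMA_FREE_COHERENT and a faked bus address (all names here are illustrative, not the driver's):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

struct dma_mem {                 /* mirrors the p_virt/p_phys/size trio */
	void     *p_virt;
	uint64_t  p_phys;
	size_t    size;
};

static int t2_alloc(struct dma_mem *t2, int count, size_t size)
{
	int i;

	for (i = 0; i < count; i++) {
		void **p_virt = &t2[i].p_virt;

		*p_virt = malloc(size);             /* coherent alloc in the driver */
		if (!t2[i].p_virt)
			return -1;                  /* caller runs t2_free() on failure */
		memset(*p_virt, 0, size);           /* OSAL_MEM_ZERO(*p_virt, size) */
		t2[i].p_phys = (uintptr_t)*p_virt;  /* fake bus address for the sketch */
		t2[i].size = size;
	}
	return 0;
}

static void t2_free(struct dma_mem *t2, int count)
{
	int i;

	for (i = 0; i < count; i++) {
		if (t2[i].p_virt) {                 /* free only populated slots */
			free(t2[i].p_virt);
			t2[i].p_virt = NULL;
		}
	}
}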

ecore_spq.c
578 struct ecore_spq_entry *p_virt = OSAL_NULL; in ecore_spq_setup() local
593 p_virt = p_spq->p_virt; in ecore_spq_setup()
597 DMA_REGPAIR_LE(p_virt->elem.data_ptr, p_phys); in ecore_spq_setup()
599 OSAL_LIST_PUSH_TAIL(&p_virt->list, &p_spq->free_pool); in ecore_spq_setup()
601 p_virt++; in ecore_spq_setup()
643 struct ecore_spq_entry *p_virt = OSAL_NULL; in ecore_spq_alloc() local
670 p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, &p_phys, in ecore_spq_alloc()
673 if (!p_virt) { in ecore_spq_alloc()
677 p_spq->p_virt = p_virt; in ecore_spq_alloc()
707 if (p_spq->p_virt) { in ecore_spq_free()
[all …]
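
In ecore_spq.c, ecore_spq_alloc() takes one coherent block for the whole entry array and ecore_spq_setup() walks it, stamping each entry with a bus address and pushing it onto the free pool. A hedged sketch of that walk, using a singly linked free list and a plain uint64_t in place of the OSAL list helpers and DMA_REGPAIR_LE (the driver pushes to the tail of a doubly linked list; the layout here is simplified):

#include <stddef.h>
#include <stdint.h>

struct spq_entry {
	struct spq_entry *next;          /* stands in for the OSAL list node */
	uint64_t          data_phys;     /* DMA_REGPAIR_LE(elem.data_ptr, p_phys) */
};

struct spq {
	struct spq_entry *p_virt;        /* base of the coherent entry array */
	uint64_t          p_phys;        /* bus address of that array */
	struct spq_entry *free_pool;     /* free list built by setup */
	unsigned int      num_entries;
};

static void spq_setup(struct spq *p_spq)
{
	struct spq_entry *p_virt = p_spq->p_virt;
	uint64_t p_phys = p_spq->p_phys;
	unsigned int i;

	p_spq->free_pool = NULL;
	for (i = 0; i < p_spq->num_entries; i++) {
		p_virt->data_phys = p_phys;       /* record this entry's bus address */
		p_virt->next = p_spq->free_pool;  /* push onto the free pool */
		p_spq->free_pool = p_virt;
		p_virt++;                         /* advance the virtual pointer... */
		p_phys += sizeof(*p_virt);        /* ...and the bus address in lockstep */
	}
}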

ecore_hw.c
1096 void *p_virt; in ecore_dmae_sanity() local
1099 p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, &p_phys, 2 * size); in ecore_dmae_sanity()
1100 if (!p_virt) { in ecore_dmae_sanity()
1108 for (p_tmp = (u32 *)p_virt; in ecore_dmae_sanity()
1109 p_tmp < (u32 *)((u8 *)p_virt + size); in ecore_dmae_sanity()
1117 OSAL_MEM_ZERO((u8 *)p_virt + size, size); in ecore_dmae_sanity()
1121 phase, (unsigned long long)p_phys, p_virt, in ecore_dmae_sanity()
1122 (unsigned long long)(p_phys + size), (u8 *)p_virt + size, in ecore_dmae_sanity()
1136 for (p_tmp = (u32 *)((u8 *)p_virt + size); in ecore_dmae_sanity()
1137 p_tmp < (u32 *)((u8 *)p_virt + (2 * size)); in ecore_dmae_sanity()
[all …]
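
The ecore_hw.c matches are the DMAE sanity check: one allocation of 2 * size bytes, the first half filled with a counting pattern, the second half zeroed, and the second half then verified word by word after the transfer. A standalone sketch, assuming size is a multiple of 4, with malloc() standing in for the coherent allocation and memcpy() for the DMAE engine copy:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

static int dmae_sanity(size_t size)     /* size assumed to be a multiple of 4 */
{
	uint32_t *p_tmp;
	uint8_t *p_virt;
	int rc = 0;

	p_virt = malloc(2 * size);      /* one buffer: source half + destination half */
	if (!p_virt)
		return -1;

	/* Fill the source half with a recognizable, address-derived pattern. */
	for (p_tmp = (uint32_t *)p_virt;
	     p_tmp < (uint32_t *)(p_virt + size); p_tmp++)
		*p_tmp = (uint32_t)(uintptr_t)p_tmp;

	/* Zero the destination half, as the driver does before the transfer. */
	memset(p_virt + size, 0, size);

	/* The driver kicks the DMAE engine here; memcpy is only a stand-in. */
	memcpy(p_virt + size, p_virt, size);

	/* Verify the destination half word by word against the expected pattern. */
	for (p_tmp = (uint32_t *)(p_virt + size);
	     p_tmp < (uint32_t *)(p_virt + 2 * size); p_tmp++) {
		if (*p_tmp != (uint32_t)(uintptr_t)((uint8_t *)p_tmp - size)) {
			rc = -1;
			break;
		}
	}

	free(p_virt);
	return rc;
}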

ecore_sriov.c
377 p_bulletin = p_vf->bulletin.p_virt; in ecore_iov_post_vf_bulletin()
516 vf->bulletin.p_virt = p_bulletin_virt + idx; in ecore_iov_setup_vfdb()
1073 p_bulletin = p_vf->bulletin.p_virt; in ecore_iov_set_link()
1251 if (vf->bulletin.p_virt) in ecore_iov_release_hw_for_vf()
1252 OSAL_MEMSET(vf->bulletin.p_virt, 0, in ecore_iov_release_hw_for_vf()
1253 sizeof(*vf->bulletin.p_virt)); in ecore_iov_release_hw_for_vf()
2023 p_vf->bulletin.p_virt->mac, in ecore_iov_configure_vport_forced()
2048 filter.vlan = p_vf->bulletin.p_virt->pvid; in ecore_iov_configure_vport_forced()
2171 p_bitmap = &vf_info->bulletin.p_virt->valid_bitmap; in ecore_iov_vf_mbx_start_vport()
2175 vf_info->bulletin.p_virt->default_only_untagged = vf_req; in ecore_iov_vf_mbx_start_vport()
[all …]
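
On the PF side (ecore_sriov.c), one shared DMA region is carved into per-VF bulletin slots (vf->bulletin.p_virt = p_bulletin_virt + idx), and forced settings such as the MAC or pvid are written into a VF's slot along with a bit in its valid_bitmap before the bulletin is posted. A simplified sketch with stand-in field names for struct ecore_bulletin_content (the bit position and layout are illustrative only):

#include <stdint.h>

struct bulletin_content {              /* simplified stand-in layout */
	uint32_t crc;
	uint64_t valid_bitmap;
	uint8_t  mac[6];
	uint16_t pvid;
};

struct vf_info {
	struct bulletin_content *p_virt;   /* this VF's slot in the shared region */
};

#define VLAN_ADDR_FORCED 1             /* illustrative bit position */

static void iov_setup_vfdb(struct vf_info *vfs, int num_vfs,
			   struct bulletin_content *p_bulletin_virt)
{
	int idx;

	for (idx = 0; idx < num_vfs; idx++)
		vfs[idx].p_virt = p_bulletin_virt + idx;   /* per-VF slot */
}

static void iov_force_vlan(struct vf_info *vf, uint16_t pvid)
{
	struct bulletin_content *p_bulletin = vf->p_virt;

	p_bulletin->pvid = pvid;                              /* forced VLAN id */
	p_bulletin->valid_bitmap |= 1ULL << VLAN_ADDR_FORCED; /* mark it valid */
	/* the driver then CRCs the bulletin and posts it to the VF */
}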

ecore_dev.c
6086 void *p_virt = p_chain->p_virt_addr, *p_virt_next = OSAL_NULL; in ecore_chain_free_next_ptr() local
6091 if (!p_virt) in ecore_chain_free_next_ptr()
6097 if (!p_virt) in ecore_chain_free_next_ptr()
6100 p_next = (struct ecore_chain_next *)((u8 *)p_virt + size); in ecore_chain_free_next_ptr()
6104 OSAL_DMA_FREE_COHERENT(p_dev, p_virt, p_phys, in ecore_chain_free_next_ptr()
6107 p_virt = p_virt_next; in ecore_chain_free_next_ptr()
6202 void *p_virt = OSAL_NULL, *p_virt_prev = OSAL_NULL; in ecore_chain_alloc_next_ptr() local
6207 p_virt = OSAL_DMA_ALLOC_COHERENT(p_dev, &p_phys, in ecore_chain_alloc_next_ptr()
6209 if (!p_virt) { in ecore_chain_alloc_next_ptr()
6216 ecore_chain_init_mem(p_chain, p_virt, p_phys); in ecore_chain_alloc_next_ptr()
[all …]
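
The ecore_dev.c hits are the next-ptr chain mode: every page ends with a small record holding the next page's virtual and bus addresses, so teardown must read the link before releasing the page it lives in, while ecore_chain_alloc_next_ptr() builds the same list page by page. A sketch of the free walk, with free() standing in for OSAL_DMA_FREE_COHERENT and a simplified trailer layout:

#include <stdint.h>
#include <stdlib.h>

struct chain_next {                     /* trailer stored at the end of each page */
	void    *next_virt;
	uint64_t next_phys;
};

static void chain_free_next_ptr(void *p_virt, size_t usable_size,
				unsigned int page_cnt)
{
	unsigned int i;

	for (i = 0; i < page_cnt; i++) {
		struct chain_next *p_next;
		void *p_virt_next;

		if (!p_virt)
			break;

		/* Read the link before the page that holds it is freed. */
		p_next = (struct chain_next *)((uint8_t *)p_virt + usable_size);
		p_virt_next = p_next->next_virt;

		free(p_virt);            /* OSAL_DMA_FREE_COHERENT in the driver */
		p_virt = p_virt_next;
	}
}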

ecore_int.c
1410 void *p_virt; in ecore_int_sb_attn_alloc() local
1420 p_virt = OSAL_DMA_ALLOC_COHERENT(p_dev, &p_phys, in ecore_int_sb_attn_alloc()
1422 if (!p_virt) { in ecore_int_sb_attn_alloc()
1430 ecore_int_sb_attn_init(p_hwfn, p_ptt, p_virt, p_phys); in ecore_int_sb_attn_alloc()
1800 void *p_virt; in ecore_int_sp_sb_alloc() local
1810 p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, in ecore_int_sp_sb_alloc()
1813 if (!p_virt) { in ecore_int_sp_sb_alloc()
1822 p_virt, p_phys, ECORE_SP_SB_ID); in ecore_int_sp_sb_alloc()

ecore_vf.c
249 if (p_iov->bulletin.p_virt) { in _ecore_vf_pf_release()
252 p_iov->bulletin.p_virt, in _ecore_vf_pf_release()
609 p_iov->bulletin.p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, in ecore_vf_hw_prepare()
614 p_iov->bulletin.p_virt, in ecore_vf_hw_prepare()
1722 crc_size = sizeof(p_iov->bulletin.p_virt->crc); in ecore_vf_read_bulletin()
1726 OSAL_MEMCPY(&shadow, p_iov->bulletin.p_virt, p_iov->bulletin.size); in ecore_vf_read_bulletin()
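
On the VF side (ecore_vf.c), the bulletin the PF keeps updating is first copied into a local shadow, and the CRC is then checked over everything past the crc field of that stable copy. A self-contained sketch of that check; the bulletin layout is simplified and the bitwise CRC-32 below is generic, not necessarily the polynomial or seed the driver's OSAL_CRC32 uses:

#include <stdint.h>
#include <string.h>

struct bulletin_content {              /* simplified; the real struct is larger */
	uint32_t crc;
	uint32_t version;
	uint8_t  payload[56];
};

static uint32_t crc32_sw(uint32_t seed, const uint8_t *buf, size_t len)
{
	uint32_t crc = ~seed;
	size_t i;
	int b;

	for (i = 0; i < len; i++) {
		crc ^= buf[i];
		for (b = 0; b < 8; b++)
			crc = (crc >> 1) ^ (0xEDB88320u & -(crc & 1u));
	}
	return ~crc;
}

static int vf_read_bulletin(const struct bulletin_content *p_virt,
			    struct bulletin_content *out)
{
	struct bulletin_content shadow;
	size_t crc_size = sizeof(p_virt->crc);
	size_t size = sizeof(shadow);

	/* Snapshot the DMA-visible bulletin so the PF cannot change it mid-check. */
	memcpy(&shadow, p_virt, size);

	/* The CRC covers everything after the crc field itself. */
	if (shadow.crc != crc32_sw(0, (const uint8_t *)&shadow + crc_size,
				   size - crc_size))
		return -1;             /* stale or torn bulletin; caller ignores it */

	*out = shadow;
	return 0;
}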

ecore_spq.h
185 struct ecore_spq_entry *p_virt; member

ecore_vfpf_if.h
666 struct ecore_bulletin_content *p_virt; member

ecore_ll2.c
1152 void *p_virt; in ecore_ll2_acquire_connection_ooo() local
1184 p_virt = OSAL_DMA_ALLOC_COHERENT(p_hwfn->p_dev, in ecore_ll2_acquire_connection_ooo()
1187 if (!p_virt) { in ecore_ll2_acquire_connection_ooo()
1194 p_buf->rx_buffer_virt_addr = p_virt; in ecore_ll2_acquire_connection_ooo()

qlnx_os.c
6248 union eth_tx_bd_types *p_virt; in qlnx_alloc_mem_txq() local
6262 sizeof(*p_virt), in qlnx_alloc_mem_txq()