/linux/drivers/net/ethernet/intel/iavf/

  iavf_adminq.c
     45  ret_code = iavf_allocate_dma_mem(hw, &hw->aq.arq.desc_buf,   in iavf_alloc_adminq_arq_ring()
     75  iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf);   in iavf_free_adminq_arq()
     94  ret_code = iavf_allocate_virt_mem(hw, &hw->aq.arq.dma_head,   in iavf_alloc_arq_bufs()
     99  hw->aq.arq.r.arq_bi = (struct iavf_dma_mem *)hw->aq.arq.dma_head.va;   in iavf_alloc_arq_bufs()
    103  bi = &hw->aq.arq.r.arq_bi[i];   in iavf_alloc_arq_bufs()
    112  desc = IAVF_ADMINQ_DESC(hw->aq.arq, i);   in iavf_alloc_arq_bufs()
    140  iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);   in iavf_alloc_arq_bufs()
    141  iavf_free_virt_mem(hw, &hw->aq.arq.dma_head);   in iavf_alloc_arq_bufs()
    197  iavf_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);   in iavf_free_arq_bufs()
    200  iavf_free_dma_mem(hw, &hw->aq.arq.desc_buf);   in iavf_free_arq_bufs()
    [all …]

  iavf_adminq.h
     58  struct iavf_adminq_ring arq;   /* receive queue */   member
|
/linux/drivers/net/ethernet/intel/i40e/

  i40e_adminq.c
     45  ret_code = i40e_allocate_dma_mem(hw, &hw->aq.arq.desc_buf,   in i40e_alloc_adminq_arq_ring()
     74  i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf);   in i40e_free_adminq_arq()
     93  ret_code = i40e_allocate_virt_mem(hw, &hw->aq.arq.dma_head,   in i40e_alloc_arq_bufs()
     97  hw->aq.arq.r.arq_bi = (struct i40e_dma_mem *)hw->aq.arq.dma_head.va;   in i40e_alloc_arq_bufs()
    101  bi = &hw->aq.arq.r.arq_bi[i];   in i40e_alloc_arq_bufs()
    109  desc = I40E_ADMINQ_DESC(hw->aq.arq, i);   in i40e_alloc_arq_bufs()
    137  i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);   in i40e_alloc_arq_bufs()
    138  i40e_free_virt_mem(hw, &hw->aq.arq.dma_head);   in i40e_alloc_arq_bufs()
    192  i40e_free_dma_mem(hw, &hw->aq.arq.r.arq_bi[i]);   in i40e_free_arq_bufs()
    195  i40e_free_dma_mem(hw, &hw->aq.arq.desc_buf);   in i40e_free_arq_bufs()
    [all …]

  i40e_adminq.h
     58  struct i40e_adminq_ring arq;   /* receive queue */   member

  i40e_debugfs.c
    503  ring = &(hw->aq.arq);   in i40e_dbg_dump_aq_desc()
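Note: the iavf and i40e hits above are the same admin-queue pattern. The receive side (arq) owns a DMA descriptor ring (desc_buf), a virt-mem table of per-descriptor buffer records (dma_head, viewed as r.arq_bi), and one DMA data buffer per descriptor; the free calls that appear inside *_alloc_arq_bufs() (lines 140-141 / 137-138) are its unwind path when a mid-loop allocation fails. Below is a minimal userspace sketch of that allocate-then-unwind shape, with plain calloc/malloc standing in for the drivers' DMA and virt-mem helpers and purely illustrative sizes; it is not the driver code itself.

#include <stdlib.h>

#define ARQ_NUM_DESC  16           /* ring length, illustrative */
#define ARQ_BUF_SIZE  4096         /* per-descriptor buffer size, illustrative */

struct arq_buf {                   /* stand-in for struct iavf_dma_mem */
	void *va;
	size_t size;
};

struct arq_ring {
	void *desc_buf;            /* descriptor ring (DMA memory in the driver) */
	struct arq_buf *bufs;      /* per-descriptor buffer table (dma_head / arq_bi) */
};

static int arq_alloc(struct arq_ring *arq)
{
	int i;

	arq->desc_buf = calloc(ARQ_NUM_DESC, 32);    /* one descriptor per slot */
	if (!arq->desc_buf)
		return -1;

	arq->bufs = calloc(ARQ_NUM_DESC, sizeof(*arq->bufs));
	if (!arq->bufs)
		goto free_ring;

	for (i = 0; i < ARQ_NUM_DESC; i++) {
		arq->bufs[i].va = malloc(ARQ_BUF_SIZE);
		if (!arq->bufs[i].va)
			goto unwind;               /* free what was already allocated */
		arq->bufs[i].size = ARQ_BUF_SIZE;
	}
	return 0;

unwind:
	while (--i >= 0)
		free(arq->bufs[i].va);
	free(arq->bufs);
free_ring:
	free(arq->desc_buf);
	return -1;
}

static void arq_free(struct arq_ring *arq)
{
	for (int i = 0; i < ARQ_NUM_DESC; i++)
		free(arq->bufs[i].va);
	free(arq->bufs);
	free(arq->desc_buf);
}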
|
/linux/drivers/net/ethernet/hisilicon/hns3/hns3vf/

  hclgevf_mbx.c
    207  if (atomic_read(&hdev->arq.count) >=   in hclgevf_handle_mbx_msg()
    216  memcpy(hdev->arq.msg_q[hdev->arq.tail], &req->msg,   in hclgevf_handle_mbx_msg()
    218  hclge_mbx_tail_ptr_move_arq(hdev->arq);   in hclgevf_handle_mbx_msg()
    219  atomic_inc(&hdev->arq.count);   in hclgevf_handle_mbx_msg()
    314  tail = hdev->arq.tail;   in hclgevf_mbx_async_handler()
    317  while (tail != hdev->arq.head) {   in hclgevf_mbx_async_handler()
    325  msg_q = hdev->arq.msg_q[hdev->arq.head];   in hclgevf_mbx_async_handler()
    384  hclge_mbx_head_ptr_move_arq(hdev->arq);   in hclgevf_mbx_async_handler()
    385  atomic_dec(&hdev->arq.count);   in hclgevf_mbx_async_handler()
    386  msg_q = hdev->arq.msg_q[hdev->arq.head];   in hclgevf_mbx_async_handler()

  hclgevf_main.h
    262  struct hclgevf_mbx_arq_ring arq;   /* mailbox async rx queue */   member

  hclgevf_main.c
     93  hdev->arq.hdev = hdev;   in hclgevf_arq_init()
     94  hdev->arq.head = 0;   in hclgevf_arq_init()
     95  hdev->arq.tail = 0;   in hclgevf_arq_init()
     96  atomic_set(&hdev->arq.count, 0);   in hclgevf_arq_init()
|
/linux/drivers/net/ethernet/hisilicon/hns3/

  hclge_mbx.h
    249  #define hclge_mbx_tail_ptr_move_arq(arq) \   argument
    250      (arq.tail = (arq.tail + 1) % HCLGE_MBX_MAX_ARQ_MSG_NUM)
    251  #define hclge_mbx_head_ptr_move_arq(arq) \   argument
    252      (arq.head = (arq.head + 1) % HCLGE_MBX_MAX_ARQ_MSG_NUM)
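Note: taken together, the hns3 hits describe a small circular queue. hclgevf_arq_init() zeroes head, tail and count; hclgevf_handle_mbx_msg() checks count against the ring size (the exact full-queue handling is elided above), copies the message into msg_q[tail] and advances tail; hclgevf_mbx_async_handler() drains msg_q[head] until head catches up with a snapshot of tail; and the two macros above do the wrap-around modulo HCLGE_MBX_MAX_ARQ_MSG_NUM. Below is a single-threaded sketch of that ring discipline, with illustrative sizes and message layout, and the driver's atomic count replaced by a plain integer; it is not the hns3 code itself.

#include <stdbool.h>
#include <string.h>

#define ARQ_MSG_NUM   8       /* stands in for HCLGE_MBX_MAX_ARQ_MSG_NUM */
#define ARQ_MSG_WORDS 8       /* words per queued message, illustrative */

struct arq_ring {
	unsigned int head;                        /* next slot to consume */
	unsigned int tail;                        /* next slot to fill */
	unsigned int count;                       /* atomic_t in the driver */
	unsigned short msg_q[ARQ_MSG_NUM][ARQ_MSG_WORDS];
};

/* Producer side: queue one received async mailbox message. */
static bool arq_push(struct arq_ring *arq, const unsigned short *msg)
{
	if (arq->count >= ARQ_MSG_NUM)            /* ring full: refuse the message */
		return false;

	memcpy(arq->msg_q[arq->tail], msg, sizeof(arq->msg_q[0]));
	arq->tail = (arq->tail + 1) % ARQ_MSG_NUM;   /* hclge_mbx_tail_ptr_move_arq() */
	arq->count++;
	return true;
}

/* Consumer side: drain everything queued between head and the tail snapshot. */
static void arq_drain(struct arq_ring *arq, void (*handle)(const unsigned short *))
{
	unsigned int tail = arq->tail;            /* snapshot, like the async handler */

	while (arq->head != tail) {
		handle(arq->msg_q[arq->head]);
		arq->head = (arq->head + 1) % ARQ_MSG_NUM;   /* hclge_mbx_head_ptr_move_arq() */
		arq->count--;
	}
}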
|
/linux/drivers/crypto/

  mxs-dcp.c
    309  static int mxs_dcp_aes_block_crypt(struct crypto_async_request *arq)   in mxs_dcp_aes_block_crypt()   argument
    313  struct skcipher_request *req = skcipher_request_cast(arq);   in mxs_dcp_aes_block_crypt()
    314  struct dcp_async_ctx *actx = crypto_tfm_ctx(arq->tfm);   in mxs_dcp_aes_block_crypt()
    412  struct crypto_async_request *arq;   in dcp_chan_thread_aes()   local
    421  arq = crypto_dequeue_request(&sdcp->queue[chan]);   in dcp_chan_thread_aes()
    424  if (!backlog && !arq) {   in dcp_chan_thread_aes()
    434  if (arq) {   in dcp_chan_thread_aes()
    435  ret = mxs_dcp_aes_block_crypt(arq);   in dcp_chan_thread_aes()
    436  crypto_request_complete(arq, ret);   in dcp_chan_thread_aes()
    467  struct crypto_async_request *arq = &req->base;   in mxs_dcp_aes_enqueue()   local
    [all …]
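Note: in mxs-dcp.c the name is a generic struct crypto_async_request. The submit path takes the request's base crypto_async_request (line 467), and the dcp_chan_thread_aes() worker dequeues from a per-channel queue (line 421), runs mxs_dcp_aes_block_crypt() on it and reports the result through crypto_request_complete(); the backlog handling visible at line 424 is elided here. Below is a rough userspace sketch of that dequeue-run-complete loop, with a hand-rolled list standing in for the kernel's crypto_queue helpers; every name in it is illustrative, not the driver's API.

#include <stddef.h>

/* Stand-in for struct crypto_async_request: a work item plus completion hook. */
struct async_req {
	struct async_req *next;
	int (*process)(struct async_req *req);             /* the actual crypto work */
	void (*complete)(struct async_req *req, int err);  /* result callback */
};

struct req_queue {
	struct async_req *head, *tail;
};

static void queue_push(struct req_queue *q, struct async_req *r)
{
	r->next = NULL;
	if (q->tail)
		q->tail->next = r;
	else
		q->head = r;
	q->tail = r;
}

static struct async_req *queue_pop(struct req_queue *q)
{
	struct async_req *r = q->head;

	if (r) {
		q->head = r->next;
		if (!q->head)
			q->tail = NULL;
	}
	return r;
}

/* One pass of the worker loop: dequeue, do the work, report the result. */
static void chan_thread_step(struct req_queue *q)
{
	struct async_req *arq = queue_pop(q);

	if (!arq)
		return;                  /* the kernel thread would sleep here */

	int ret = arq->process(arq);     /* mxs_dcp_aes_block_crypt() in the driver */
	arq->complete(arq, ret);         /* crypto_request_complete() in the driver */
}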
|
/linux/drivers/net/ethernet/intel/idpf/

  idpf.h
    725  if (!adapter->hw.arq)   in idpf_is_reset_detected()
    728  return !(readl(idpf_get_reg_addr(adapter, adapter->hw.arq->reg.len)) &   in idpf_is_reset_detected()
    729      adapter->hw.arq->reg.len_mask);   in idpf_is_reset_detected()

  idpf_controlq.h
    108  struct idpf_ctlq_info *arq;   member

  idpf_virtchnl.c
     706  err = idpf_ctlq_recv(adapter->hw.arq, &num_recv, &ctlq_msg);   in idpf_recv_mb_msg()
     723  adapter->hw.arq,   in idpf_recv_mb_msg()
    2836  hw->arq = idpf_find_ctlq(hw, IDPF_CTLQ_TYPE_MAILBOX_RX,   in idpf_init_dflt_mbx()
    2839  if (!hw->asq || !hw->arq) {   in idpf_init_dflt_mbx()
    2856  if (adapter->hw.arq && adapter->hw.asq) {   in idpf_deinit_dflt_mbx()
    2860  adapter->hw.arq = NULL;   in idpf_deinit_dflt_mbx()
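Note: here arq is the mailbox receive control queue (IDPF_CTLQ_TYPE_MAILBOX_RX). idpf_recv_mb_msg() pulls messages from it with idpf_ctlq_recv(), and idpf_is_reset_detected() checks for a missing ARQ first (that branch is elided above) and otherwise reports a reset when the ARQ length register, masked with len_mask, reads back as zero. Below is a tiny sketch of that read-and-mask check against a memory-mapped register; the offset, the mask and the behaviour on a missing queue are placeholders, not idpf's actual values.

#include <stdbool.h>
#include <stdint.h>

#define ARQ_LEN_REG    0x0080u         /* placeholder offset of the ARQ length register */
#define ARQ_LEN_ENABLE 0x80000000u     /* placeholder "queue enabled" bit in that register */

/* Read a 32-bit device register from a mapped BAR. */
static inline uint32_t reg_read32(const volatile uint8_t *bar, uint32_t off)
{
	return *(const volatile uint32_t *)(bar + off);
}

/* Report a reset when the receive control queue is gone or its enable bit dropped. */
static bool is_reset_detected(const volatile uint8_t *bar, bool arq_initialized)
{
	if (!arq_initialized)
		return true;           /* assumption for this sketch: no ARQ means "in reset" */

	return !(reg_read32(bar, ARQ_LEN_REG) & ARQ_LEN_ENABLE);
}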
|
/linux/net/netfilter/

  nf_conntrack_h323_main.c
    1444  unsigned char **data, AdmissionRequest *arq)   in process_arq()   argument
    1458  if ((arq->options & eAdmissionRequest_destCallSignalAddress) &&   in process_arq()
    1459  get_h225_addr(ct, *data, &arq->destCallSignalAddress,   in process_arq()
    1467  &arq->destCallSignalAddress,   in process_arq()
    1472  if ((arq->options & eAdmissionRequest_srcCallSignalAddress) &&   in process_arq()
    1473  get_h225_addr(ct, *data, &arq->srcCallSignalAddress,   in process_arq()
    1480  &arq->srcCallSignalAddress,   in process_arq()
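Note: in the H.323 connection-tracking helper, arq is an ASN.1-decoded AdmissionRequest, and both call-signalling addresses it can carry are optional: process_arq() tests the matching bit in arq->options before it reads destCallSignalAddress or srcCallSignalAddress with get_h225_addr(). Below is a small sketch of that presence-bit pattern, with made-up option flags and an illustrative address type rather than the kernel's H.225 definitions.

#include <stdint.h>
#include <stdio.h>

/* Option bits recording which optional fields were actually decoded. */
enum {
	ARQ_OPT_DEST_ADDR = 1u << 0,   /* placeholder for eAdmissionRequest_destCallSignalAddress */
	ARQ_OPT_SRC_ADDR  = 1u << 1,   /* placeholder for eAdmissionRequest_srcCallSignalAddress */
};

struct transport_addr {
	uint32_t ip;
	uint16_t port;
};

struct admission_request {
	uint32_t options;              /* bitmask of the optional fields present */
	struct transport_addr dest;    /* valid only if ARQ_OPT_DEST_ADDR is set */
	struct transport_addr src;     /* valid only if ARQ_OPT_SRC_ADDR is set */
};

static void process_arq(const struct admission_request *arq)
{
	/* Never touch an optional field before checking its presence bit. */
	if (arq->options & ARQ_OPT_DEST_ADDR)
		printf("dest 0x%08x:%u\n", arq->dest.ip, (unsigned)arq->dest.port);

	if (arq->options & ARQ_OPT_SRC_ADDR)
		printf("src  0x%08x:%u\n", arq->src.ip, (unsigned)arq->src.port);
}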
|