
Searched refs:dma_mem (Results 1 – 18 of 18) sorted by relevance

/linux/kernel/dma/
coherent.c
25 if (dev && dev->dma_mem) in dev_get_coherent_memory()
26 return dev->dma_mem; in dev_get_coherent_memory()
41 struct dma_coherent_mem *dma_mem; in dma_init_coherent_memory() local
52 dma_mem = kzalloc(sizeof(struct dma_coherent_mem), GFP_KERNEL); in dma_init_coherent_memory()
53 if (!dma_mem) in dma_init_coherent_memory()
55 dma_mem->bitmap = bitmap_zalloc(pages, GFP_KERNEL); in dma_init_coherent_memory()
56 if (!dma_mem->bitmap) in dma_init_coherent_memory()
59 dma_mem->virt_base = mem_base; in dma_init_coherent_memory()
60 dma_mem->device_base = device_addr; in dma_init_coherent_memory()
61 dma_mem->pfn_base = PFN_DOWN(phys_addr); in dma_init_coherent_memory()
[all …]
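
The coherent.c hits above are the allocator behind the dev->dma_mem pointer in struct device (see the device.h hit at the bottom of this list). Below is a minimal sketch of the initialization pattern those lines show, using a reduced stand-in for struct dma_coherent_mem (the real struct is private to coherent.c) and hypothetical mem_base/phys_addr/device_addr/pages parameters.

#include <linux/slab.h>
#include <linux/bitmap.h>
#include <linux/pfn.h>
#include <linux/err.h>
#include <linux/types.h>

/* Reduced stand-in for the dma_coherent_mem fields touched above. */
struct coherent_mem_sketch {
        void *virt_base;        /* kernel virtual address of the region */
        dma_addr_t device_base; /* bus address as seen by the device    */
        unsigned long pfn_base; /* first physical page frame number     */
        int size;               /* region size in pages                 */
        unsigned long *bitmap;  /* one bit per page: allocated or free  */
};

static struct coherent_mem_sketch *init_coherent_mem_sketch(void *mem_base,
                phys_addr_t phys_addr, dma_addr_t device_addr, int pages)
{
        struct coherent_mem_sketch *dma_mem;

        dma_mem = kzalloc(sizeof(*dma_mem), GFP_KERNEL);
        if (!dma_mem)
                return ERR_PTR(-ENOMEM);

        /* One bit per page tracks which parts of the region are handed out. */
        dma_mem->bitmap = bitmap_zalloc(pages, GFP_KERNEL);
        if (!dma_mem->bitmap) {
                kfree(dma_mem);
                return ERR_PTR(-ENOMEM);
        }

        dma_mem->virt_base = mem_base;
        dma_mem->device_base = device_addr;
        dma_mem->pfn_base = PFN_DOWN(phys_addr);
        dma_mem->size = pages;
        return dma_mem;
}
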
/linux/drivers/tee/
tee_shm.c
60 struct tee_shm_dma_mem *dma_mem; in tee_shm_release() local
62 dma_mem = container_of(shm, struct tee_shm_dma_mem, shm); in tee_shm_release()
63 p = dma_mem; in tee_shm_release()
64 dma_free_pages(&teedev->dev, shm->size, dma_mem->page, in tee_shm_release()
65 dma_mem->dma_addr, DMA_BIDIRECTIONAL); in tee_shm_release()
306 struct tee_shm_dma_mem *dma_mem; in tee_shm_alloc_dma_mem() local
318 dma_mem = kzalloc(sizeof(*dma_mem), GFP_KERNEL); in tee_shm_alloc_dma_mem()
319 if (!dma_mem) in tee_shm_alloc_dma_mem()
322 refcount_set(&dma_mem->shm.refcount, 1); in tee_shm_alloc_dma_mem()
323 dma_mem->shm.ctx = ctx; in tee_shm_alloc_dma_mem()
[all …]
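
In tee_shm.c the DMA buffer is wrapped: tee_shm_alloc_dma_mem() embeds the tee_shm object in a tee_shm_dma_mem together with the page pointer and DMA address, and tee_shm_release() recovers the wrapper with container_of() before calling dma_free_pages(). A rough sketch of that shape, with an illustrative wrapper instead of the real tee structures:

#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/container_of.h>

/* Illustrative only: a shared-memory object embedded in its DMA wrapper. */
struct shm_sketch {
        size_t size;
};

struct shm_dma_mem_sketch {
        struct shm_sketch shm;  /* embedded object handed to callers       */
        struct page *page;      /* backing pages from dma_alloc_pages()    */
        dma_addr_t dma_addr;
};

static void shm_release_sketch(struct device *dev, struct shm_sketch *shm)
{
        struct shm_dma_mem_sketch *dma_mem;

        /* Recover the wrapper from the embedded member, as tee_shm_release() does. */
        dma_mem = container_of(shm, struct shm_dma_mem_sketch, shm);
        dma_free_pages(dev, shm->size, dma_mem->page,
                       dma_mem->dma_addr, DMA_BIDIRECTIONAL);
        kfree(dma_mem);
}
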
/linux/drivers/scsi/be2iscsi/
be.h
31 struct be_dma_mem dma_mem; member
53 return q->dma_mem.va + q->head * q->entry_size; in queue_head_node()
58 return q->dma_mem.va + wrb_num * q->entry_size; in queue_get_wrb()
63 return q->dma_mem.va + q->tail * q->entry_size; in queue_tail_node()
be_cmds.c
741 struct be_dma_mem *q_mem = &eq->dma_mem; in beiscsi_cmd_eq_create()
783 struct be_dma_mem *q_mem = &cq->dma_mem; in beiscsi_cmd_cq_create()
856 struct be_dma_mem *q_mem = &mccq->dma_mem; in beiscsi_cmd_mccq_create()
982 struct be_dma_mem *q_mem = &dq->dma_mem; in be_cmd_create_default_pdu_queue()
be_main.c
2998 struct be_dma_mem *mem = &q->dma_mem; in be_fill_queue()
3030 mem = &eq->dma_mem; in beiscsi_create_eqs()
3067 mem = &eq->dma_mem; in beiscsi_create_eqs()
3096 mem = &cq->dma_mem; in beiscsi_create_cqs()
3130 mem = &cq->dma_mem; in beiscsi_create_cqs()
3155 mem = &dq->dma_mem; in beiscsi_create_def_hdr()
3207 mem = &dataq->dma_mem; in beiscsi_create_def_data()
3322 struct be_dma_mem *mem = &q->dma_mem; in be_queue_free()
3333 struct be_dma_mem *mem = &q->dma_mem; in be_queue_alloc()
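
Both be2iscsi and the benet driver further down keep a be_dma_mem inside each queue and index entries by head/tail offsets into one coherent buffer. A condensed sketch of that alloc-and-index pattern (be_queue_alloc()/queue_head_node() style), with the structs trimmed to the fields used in the hits above:

#include <linux/dma-mapping.h>
#include <linux/types.h>

/* Trimmed-down stand-ins for be_dma_mem / be_queue_info. */
struct dma_mem_sketch {
        void *va;
        dma_addr_t dma;
        u32 size;
};

struct queue_sketch {
        struct dma_mem_sketch dma_mem;
        u16 head, tail, len, entry_size;
};

static int queue_alloc_sketch(struct device *dev, struct queue_sketch *q,
                              u16 len, u16 entry_size)
{
        struct dma_mem_sketch *mem = &q->dma_mem;

        q->len = len;
        q->entry_size = entry_size;
        mem->size = len * entry_size;
        /* One coherent allocation backs every entry in the ring. */
        mem->va = dma_alloc_coherent(dev, mem->size, &mem->dma, GFP_KERNEL);
        return mem->va ? 0 : -ENOMEM;
}

/* queue_head_node() equivalent: entries are addressed by plain offset math. */
static void *queue_head_node_sketch(struct queue_sketch *q)
{
        return q->dma_mem.va + q->head * q->entry_size;
}
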
/linux/drivers/net/ethernet/intel/idpf/
idpf_virtchnl.c
130 struct idpf_dma_mem *dma_mem; in idpf_mb_clean() local
144 dma_mem = q_msg[i]->ctx.indirect.payload; in idpf_mb_clean()
145 if (dma_mem) in idpf_mb_clean()
146 dma_free_coherent(&adapter->pdev->dev, dma_mem->size, in idpf_mb_clean()
147 dma_mem->va, dma_mem->pa); in idpf_mb_clean()
149 kfree(dma_mem); in idpf_mb_clean()
223 struct idpf_dma_mem *dma_mem; in idpf_send_mb_msg() local
242 dma_mem = kzalloc(sizeof(*dma_mem), GFP_ATOMIC); in idpf_send_mb_msg()
243 if (!dma_mem) { in idpf_send_mb_msg()
256 dma_mem->size = IDPF_CTLQ_MAX_BUF_LEN; in idpf_send_mb_msg()
[all …]
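
idpf tracks each mailbox payload with a small idpf_dma_mem descriptor: the descriptor itself comes from kzalloc() and the payload from coherent DMA memory, and idpf_mb_clean() undoes both. A sketch of that pairing, assuming a minimal descriptor with va/pa/size fields and hypothetical helper names:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

struct dma_mem_desc_sketch {    /* stand-in for struct idpf_dma_mem */
        void *va;
        dma_addr_t pa;
        size_t size;
};

static struct dma_mem_desc_sketch *mb_buf_alloc_sketch(struct device *dev,
                                                       size_t size)
{
        struct dma_mem_desc_sketch *dma_mem;

        dma_mem = kzalloc(sizeof(*dma_mem), GFP_ATOMIC);
        if (!dma_mem)
                return NULL;

        dma_mem->size = size;
        dma_mem->va = dma_alloc_coherent(dev, size, &dma_mem->pa, GFP_ATOMIC);
        if (!dma_mem->va) {
                kfree(dma_mem);
                return NULL;
        }
        return dma_mem;
}

static void mb_buf_free_sketch(struct device *dev,
                               struct dma_mem_desc_sketch *dma_mem)
{
        if (!dma_mem)
                return;
        /* Free the coherent payload first, then the descriptor itself. */
        dma_free_coherent(dev, dma_mem->size, dma_mem->va, dma_mem->pa);
        kfree(dma_mem);
}
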
/linux/drivers/net/ethernet/qlogic/qed/
qed_cxt.c
776 if (!p_t2 || !p_t2->dma_mem) in qed_cxt_src_t2_free()
780 if (p_t2->dma_mem[i].virt_addr) in qed_cxt_src_t2_free()
782 p_t2->dma_mem[i].size, in qed_cxt_src_t2_free()
783 p_t2->dma_mem[i].virt_addr, in qed_cxt_src_t2_free()
784 p_t2->dma_mem[i].phys_addr); in qed_cxt_src_t2_free()
786 kfree(p_t2->dma_mem); in qed_cxt_src_t2_free()
787 p_t2->dma_mem = NULL; in qed_cxt_src_t2_free()
797 if (!p_t2 || !p_t2->dma_mem) in qed_cxt_t2_alloc_pages()
802 p_virt = &p_t2->dma_mem[i].virt_addr; in qed_cxt_t2_alloc_pages()
806 &p_t2->dma_mem[i].phys_addr, in qed_cxt_t2_alloc_pages()
[all …]
qed_cxt.h
303 struct phys_mem_desc *dma_mem; member
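
In qed the T2 table is split across chunks: p_t2->dma_mem points at an array of phys_mem_desc entries (see the qed_cxt.h hit above), each owning one coherent allocation, and qed_cxt_src_t2_free() walks the array. A compact sketch of that teardown loop, with the surrounding structs reduced to the fields involved:

#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/types.h>

struct phys_mem_desc_sketch {   /* stand-in for struct phys_mem_desc */
        void *virt_addr;
        dma_addr_t phys_addr;
        size_t size;
};

static void t2_free_sketch(struct device *dev,
                           struct phys_mem_desc_sketch *dma_mem, u32 num_pages)
{
        u32 i;

        if (!dma_mem)
                return;

        /* Each array slot owns one independently sized coherent buffer. */
        for (i = 0; i < num_pages; i++) {
                if (!dma_mem[i].virt_addr)
                        continue;
                dma_free_coherent(dev, dma_mem[i].size,
                                  dma_mem[i].virt_addr, dma_mem[i].phys_addr);
        }
        kfree(dma_mem);
}
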
/linux/drivers/scsi/
stex.c
315 void *dma_mem; member
411 struct req_msg *req = hba->dma_mem + hba->req_head * hba->rq_size; in stex_alloc_req()
421 return (struct req_msg *)(hba->dma_mem + in stex_ss_alloc_req()
1147 msg_h = (struct st_msg_header *)hba->dma_mem; in stex_ss_handshake()
1737 hba->dma_mem = dma_alloc_coherent(&pdev->dev, in stex_probe()
1739 if (!hba->dma_mem) { in stex_probe()
1748 hba->dma_mem = dma_alloc_coherent(&pdev->dev, in stex_probe()
1752 if (!hba->dma_mem) { in stex_probe()
1769 hba->scratch = (__le32 *)(hba->dma_mem + scratch_offset); in stex_probe()
1770 hba->status_buffer = (struct status_msg *)(hba->dma_mem + sts_offset); in stex_probe()
[all …]
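
stex.c takes the opposite approach to the per-queue drivers: stex_probe() makes one large dma_alloc_coherent() allocation and derives the request ring, status buffer and scratch area from it by fixed offsets. A sketch of that carving, with hypothetical offset parameters standing in for the driver's size calculations:

#include <linux/dma-mapping.h>
#include <linux/types.h>

/* Sketch only: one coherent block, sliced into sub-buffers by offset. */
static int carve_dma_mem_sketch(struct device *dev, size_t total_size,
                                size_t sts_offset, size_t scratch_offset,
                                void **dma_mem, dma_addr_t *dma_handle,
                                void **status_buffer, __le32 **scratch)
{
        *dma_mem = dma_alloc_coherent(dev, total_size, dma_handle, GFP_KERNEL);
        if (!*dma_mem)
                return -ENOMEM;

        /* Requests start at offset 0; the other regions follow at fixed offsets. */
        *status_buffer = *dma_mem + sts_offset;
        *scratch = (__le32 *)(*dma_mem + scratch_offset);
        return 0;
}
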
/linux/drivers/net/ethernet/emulex/benet/
be.h
130 struct be_dma_mem dma_mem; member
152 return q->dma_mem.va + q->head * q->entry_size; in queue_head_node()
157 return q->dma_mem.va + q->tail * q->entry_size; in queue_tail_node()
162 return q->dma_mem.va + index * q->entry_size; in queue_index_node()
be_cmds.c
999 struct be_dma_mem *q_mem = &eqo->q.dma_mem; in be_cmd_eq_create()
1164 struct be_dma_mem *q_mem = &cq->dma_mem; in be_cmd_cq_create()
1242 struct be_dma_mem *q_mem = &mccq->dma_mem; in be_cmd_mccq_ext_create()
1307 struct be_dma_mem *q_mem = &mccq->dma_mem; in be_cmd_mccq_org_create()
1366 struct be_dma_mem *q_mem = &txq->dma_mem; in be_cmd_txq_create()
1414 struct be_dma_mem *q_mem = &rxq->dma_mem; in be_cmd_rxq_create()
be_main.c
146 struct be_dma_mem *mem = &q->dma_mem; in be_queue_free()
158 struct be_dma_mem *mem = &q->dma_mem; in be_queue_alloc()
1437 entry = txo->q.dma_mem.va; in be_tx_timeout()
1447 entry = txo->cq.dma_mem.va; in be_tx_timeout()
/linux/drivers/net/ethernet/freescale/dpaa2/
dpaa2-eth.c
3696 void *dma_mem, *key, *mask; in dpaa2_eth_set_vlan_qos() local
3710 dma_mem = kzalloc(DPAA2_CLASSIFIER_DMA_SIZE, GFP_KERNEL); in dpaa2_eth_set_vlan_qos()
3711 if (!dma_mem) in dpaa2_eth_set_vlan_qos()
3720 err = dpni_prepare_key_cfg(&kg_cfg, dma_mem); in dpaa2_eth_set_vlan_qos()
3729 qos_cfg.key_cfg_iova = dma_map_single(dev, dma_mem, in dpaa2_eth_set_vlan_qos()
3796 kfree(dma_mem); in dpaa2_eth_set_vlan_qos()
4243 u8 *dma_mem; in dpaa2_eth_set_dist_key() local
4273 dma_mem = kzalloc(DPAA2_CLASSIFIER_DMA_SIZE, GFP_KERNEL); in dpaa2_eth_set_dist_key()
4274 if (!dma_mem) in dpaa2_eth_set_dist_key()
4277 err = dpni_prepare_key_cfg(&cls_cfg, dma_mem); in dpaa2_eth_set_dist_key()
[all …]
dpaa2-switch.c
847 u8 *dma_mem; in dpaa2_switch_fdb_iterate() local
851 dma_mem = kzalloc(fdb_dump_size, GFP_KERNEL); in dpaa2_switch_fdb_iterate()
852 if (!dma_mem) in dpaa2_switch_fdb_iterate()
855 fdb_dump_iova = dma_map_single(dev, dma_mem, fdb_dump_size, in dpaa2_switch_fdb_iterate()
873 fdb_entries = (struct fdb_dump_entry *)dma_mem; in dpaa2_switch_fdb_iterate()
883 kfree(dma_mem); in dpaa2_switch_fdb_iterate()
890 kfree(dma_mem); in dpaa2_switch_fdb_iterate()
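
Unlike the coherent allocations above, the dpaa2 hits use streaming DMA: a plain kzalloc() buffer holds a key/classifier config or receives an FDB dump, is handed to firmware via dma_map_single(), then unmapped and freed. A sketch of that map/unmap round trip, with a hypothetical fw_cmd_with_iova() standing in for the real dpni/dpsw firmware commands:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int fw_buffer_roundtrip_sketch(struct device *dev, size_t size)
{
        dma_addr_t iova;
        u8 *dma_mem;
        int err;

        dma_mem = kzalloc(size, GFP_KERNEL);
        if (!dma_mem)
                return -ENOMEM;

        /* Hand the buffer to the device/firmware for the duration of the call. */
        iova = dma_map_single(dev, dma_mem, size, DMA_TO_DEVICE);
        err = dma_mapping_error(dev, iova);
        if (err)
                goto free_mem;

        /* err = fw_cmd_with_iova(dev, iova);  hypothetical firmware command */

        dma_unmap_single(dev, iova, size, DMA_TO_DEVICE);
free_mem:
        kfree(dma_mem);
        return err;
}
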
/linux/drivers/infiniband/hw/irdma/
verbs.h
171 struct irdma_dma_mem dma_mem; member
utils.c
1167 dma_free_coherent(rf->sc_dev.hw->device, iwqp->kqp.dma_mem.size, in irdma_free_qp_rsrc()
1168 iwqp->kqp.dma_mem.va, iwqp->kqp.dma_mem.pa); in irdma_free_qp_rsrc()
1169 iwqp->kqp.dma_mem.va = NULL; in irdma_free_qp_rsrc()
verbs.c
696 struct irdma_dma_mem *mem = &iwqp->kqp.dma_mem; in irdma_setup_kmode_qp()
/linux/include/linux/
device.h
612 struct dma_coherent_mem *dma_mem; /* internal for coherent mem member