/linux/net/xdp/

  xsk_buff_pool.c
    320  struct xsk_dma_map *dma_map;  in xp_find_dma_map() local
    322  list_for_each_entry(dma_map, &pool->umem->xsk_dma_list, list) {  in xp_find_dma_map()
    323  if (dma_map->netdev == pool->netdev)  in xp_find_dma_map()
    324  return dma_map;  in xp_find_dma_map()
    333  struct xsk_dma_map *dma_map;  in xp_create_dma_map() local
    335  dma_map = kzalloc(sizeof(*dma_map), GFP_KERNEL);  in xp_create_dma_map()
    336  if (!dma_map)  in xp_create_dma_map()
    339  dma_map->dma_pages = kvcalloc(nr_pages, sizeof(*dma_map…  in xp_create_dma_map()
    360  xp_destroy_dma_map(struct xsk_dma_map *dma_map)  xp_destroy_dma_map() argument
    367  __xp_dma_unmap(struct xsk_dma_map *dma_map, unsigned long attrs)  __xp_dma_unmap() argument
    387  struct xsk_dma_map *dma_map;  xp_dma_unmap() local
    408  xp_check_dma_contiguity(struct xsk_dma_map *dma_map)  xp_check_dma_contiguity() argument
    420  xp_init_dma_info(struct xsk_buff_pool *pool, struct xsk_dma_map *dma_map)  xp_init_dma_info() argument
    449  struct xsk_dma_map *dma_map;  xp_dma_map() local
    [all …]
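
Read together, these hits implement one idea: DMA mappings of a umem are cached per netdev on umem->xsk_dma_list, so a second pool bound to the same device reuses the mapping (xp_find_dma_map()) instead of remapping every page (xp_create_dma_map()). A runnable userspace model of that find-or-create step; a flat list, string key, and plain refcount stand in for the kernel's list_head, struct net_device *, and refcount_t:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct xsk_dma_map {
        struct xsk_dma_map *next;   /* stands in for the list_head node */
        const char *netdev;         /* kernel keys on a struct net_device * */
        unsigned int refcnt;
    };

    static struct xsk_dma_map *dma_list;   /* stands in for umem->xsk_dma_list */

    static struct xsk_dma_map *find_dma_map(const char *netdev)
    {
        for (struct xsk_dma_map *m = dma_list; m; m = m->next)
            if (strcmp(m->netdev, netdev) == 0)
                return m;          /* already mapped for this device */
        return NULL;
    }

    static struct xsk_dma_map *find_or_create(const char *netdev)
    {
        struct xsk_dma_map *m = find_dma_map(netdev);

        if (m) {
            m->refcnt++;           /* reuse the cached mapping */
            return m;
        }
        m = calloc(1, sizeof(*m)); /* kzalloc() equivalent */
        if (!m)
            return NULL;
        m->netdev = netdev;
        m->refcnt = 1;
        m->next = dma_list;        /* list_add() equivalent */
        dma_list = m;
        return m;
    }

    int main(void)
    {
        struct xsk_dma_map *a = find_or_create("eth0");
        struct xsk_dma_map *b = find_or_create("eth0"); /* hits the cache */

        printf("same map: %s, refcnt=%u\n", a == b ? "yes" : "no", a->refcnt);
        return 0;
    }
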
/linux/drivers/misc/genwqe/

  card_dev.c
    121  struct dma_mapping *dma_map)  __genwqe_add_mapping() argument
    126  list_add(&dma_map->card_list, &cfile->map_list);  in __genwqe_add_mapping()
    131  struct dma_mapping *dma_map)  __genwqe_del_mapping() argument
    136  list_del(&dma_map->card_list);  in __genwqe_del_mapping()
    193  struct dma_mapping *dma_map;  in genwqe_remove_mappings() local
    198  dma_map = list_entry(node, struct dma_mapping, card_list);  in genwqe_remove_mappings()
    200  list_del_init(&dma_map->card_list);  in genwqe_remove_mappings()
    211  __func__, i++, dma_map->u_vaddr,  in genwqe_remove_mappings()
    212  (unsigned long)dma_map->k_vaddr,  in genwqe_remove_mappings()
    213  (unsigned long)dma_map->dma_addr);  in genwqe_remove_mappings()
    [all …]
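
These hits are leak-proofing: every DMA mapping a process creates is linked into the per-open-file map_list, and genwqe_remove_mappings() sweeps whatever the process failed to unmap before its file descriptor goes away, logging the three addresses seen above. A hedged userspace model of that sweep, with the fields reduced to those addresses:

    #include <stdio.h>
    #include <stdlib.h>

    struct dma_mapping {
        struct dma_mapping *next;        /* stands in for the card_list node */
        void *u_vaddr;                   /* user, kernel and bus addresses,  */
        void *k_vaddr;                   /* as printed by the kernel code    */
        unsigned long long dma_addr;
    };

    struct cfile {                       /* per-open-file state */
        struct dma_mapping *map_list;
    };

    static void add_mapping(struct cfile *cf, struct dma_mapping *m)
    {
        m->next = cf->map_list;          /* list_add() equivalent */
        cf->map_list = m;
    }

    /* Called on release: reclaim anything the process leaked. */
    static int remove_mappings(struct cfile *cf)
    {
        int i = 0;

        while (cf->map_list) {
            struct dma_mapping *m = cf->map_list;

            cf->map_list = m->next;      /* list_del_init() equivalent */
            fprintf(stderr, "leaked mapping %d: u=%p k=%p dma=0x%llx\n",
                    i++, m->u_vaddr, m->k_vaddr, m->dma_addr);
            free(m);                     /* the kernel would DMA-unmap here */
        }
        return i;
    }

    int main(void)
    {
        struct cfile cf = { 0 };
        struct dma_mapping *m = calloc(1, sizeof(*m));

        m->dma_addr = 0x1000;
        add_mapping(&cf, m);
        printf("reclaimed %d mapping(s)\n", remove_mappings(&cf));
        return 0;
    }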
|
/linux/drivers/gpu/drm/i915/gem/selftests/

  i915_gem_dmabuf.c
    349  void *obj_map, *dma_map;  in igt_dmabuf_import() local
    380  dma_map = err ? NULL : map.vaddr;  in igt_dmabuf_import()
    381  if (!dma_map) {  in igt_dmabuf_import()
    396  memset(dma_map, pattern[i], PAGE_SIZE);  in igt_dmabuf_import()
    407  if (memchr_inv(dma_map, pattern[i], PAGE_SIZE)) {  in igt_dmabuf_import()
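
The selftest maps the same pages twice, once through the GEM object (obj_map) and once through the imported dma-buf (dma_map), writes a pattern through one view and verifies it through the other with memchr_inv(). A runnable userspace analogue of that verify step; memchr_inv() is re-implemented here because it is kernel-only:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define PAGE_SIZE 4096

    /* Return the first byte in p[0..n) that is not c, or NULL if all match;
     * same contract as the kernel's memchr_inv(). */
    static const void *memchr_inv(const void *p, int c, size_t n)
    {
        const unsigned char *s = p;

        for (size_t i = 0; i < n; i++)
            if (s[i] != (unsigned char)c)
                return s + i;
        return NULL;
    }

    int main(void)
    {
        unsigned char *obj_map = malloc(PAGE_SIZE);
        unsigned char *dma_map = obj_map;   /* two views of one buffer */
        const unsigned char pattern[] = { 0x00, 0xaa, 0xcc, 0x55 };

        if (!obj_map)
            return 1;
        for (size_t i = 0; i < sizeof(pattern); i++) {
            memset(dma_map, pattern[i], PAGE_SIZE);  /* write via one view  */
            if (memchr_inv(obj_map, pattern[i], PAGE_SIZE)) {
                fprintf(stderr, "pattern 0x%02x did not stick\n", pattern[i]);
                return 1;                            /* check via the other */
            }
        }
        puts("all patterns verified");
        free(obj_map);
        return 0;
    }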
|
/linux/arch/um/drivers/

  vfio_user.c
    57  struct vfio_iommu_type1_dma_map dma_map = {  in uml_vfio_user_setup_iommu() local
    58  .argsz = sizeof(dma_map),  in uml_vfio_user_setup_iommu()
    68  if (ioctl(container, VFIO_IOMMU_MAP_DMA, &dma_map) < 0)  in uml_vfio_user_setup_iommu()
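
Unlike the kernel-internal hits, this file runs as UML userspace and uses the real VFIO uAPI: fill a struct vfio_iommu_type1_dma_map and issue VFIO_IOMMU_MAP_DMA on the container fd to make a buffer visible to the device at a chosen IOVA. A hedged standalone sketch of that call; it assumes the container is already set up (group attached, VFIO_TYPE1_IOMMU selected) and that buf, iova, and len are page-aligned:

    #include <stdio.h>
    #include <stdint.h>
    #include <sys/ioctl.h>
    #include <linux/vfio.h>

    /* Map `len` bytes at `buf` into the device's IOMMU at `iova`. */
    static int map_for_device(int container, void *buf, size_t len,
                              unsigned long long iova)
    {
        struct vfio_iommu_type1_dma_map dma_map = {
            .argsz = sizeof(dma_map),
            .flags = VFIO_DMA_MAP_FLAG_READ | VFIO_DMA_MAP_FLAG_WRITE,
            .vaddr = (uintptr_t)buf,   /* process virtual address        */
            .iova  = iova,             /* address the device will use    */
            .size  = len,
        };

        if (ioctl(container, VFIO_IOMMU_MAP_DMA, &dma_map) < 0) {
            perror("VFIO_IOMMU_MAP_DMA");
            return -1;
        }
        return 0;
    }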
|
/linux/net/core/

  page_pool.c
    225  pool->dma_map = true;  in page_pool_init()
    286  if (!pool->dma_map || !pool->dma_sync) {  in page_pool_init()
    574  if (pool->dma_map && unlikely(!page_pool_dma_map(pool, page_to_netmem(page), gfp))) {  in __page_pool_alloc_page_order()
    595  bool dma_map = pool->dma_map;  in __page_pool_alloc_netmems_slow() local
    626  if (dma_map && unlikely(!page_pool_dma_map(pool, netmem, gfp))) {  in __page_pool_alloc_netmems_slow()
    730  if (!pool->dma_map)  in __page_pool_release_netmem_dma()
    1160  if (!pool->destroy_cnt++ && pool->dma_map) {  in page_pool_scrub()
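
Here dma_map is a capability decided once at pool creation: page_pool_init() latches it from the driver's flags, and every allocation, release, and scrub path then branches on it. On the driver side the switch is PP_FLAG_DMA_MAP. A hedged kernel-context sketch of creating a pool that way (not a standalone program; values illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/numa.h>
    #include <net/page_pool/helpers.h>

    static struct page_pool *rxq_create_pool(struct device *dev)
    {
        struct page_pool_params pp_params = {
            .flags     = PP_FLAG_DMA_MAP,   /* sets pool->dma_map at init   */
            .order     = 0,                 /* single pages                 */
            .pool_size = 256,               /* illustrative ring size       */
            .nid       = NUMA_NO_NODE,
            .dev       = dev,               /* device the pages map against */
            .dma_dir   = DMA_FROM_DEVICE,   /* RX: device writes, CPU reads */
        };

        return page_pool_create(&pp_params);
    }

With PP_FLAG_DMA_MAP set, the pool DMA-maps each page it allocates and unmaps it on final release, which is exactly what the __page_pool_release_netmem_dma() and page_pool_scrub() hits above guard.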
|
/linux/drivers/usb/host/

  r8a66597.h
    80  unsigned char dma_map;  member
    121  unsigned char dma_map;  member
|
  r8a66597-hcd.c
    768  if ((r8a66597->dma_map & (1 << i)) != 0)  in enable_r8a66597_pipe_dma()
    778  r8a66597->dma_map |= 1 << i;  in enable_r8a66597_pipe_dma()
    779  dev->dma_map |= 1 << i;  in enable_r8a66597_pipe_dma()
    883  r8a66597->dma_map &= ~(dev->dma_map);  in disable_r8a66597_pipe_all()
    884  dev->dma_map = 0;  in disable_r8a66597_pipe_all()
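
In this driver dma_map is not a mapping at all but a busy bitmap: bit i set in the controller-wide mask means DMA channel i is taken, and each device keeps its own mask so all of its channels can be dropped at once. A small runnable model of that claim/release logic (names and channel count hypothetical):

    #include <stdio.h>

    #define NUM_DMA_CH 2   /* the controller has a small fixed channel pool */

    struct ctrl { unsigned char dma_map; };  /* controller-wide busy mask   */
    struct udev { unsigned char dma_map; };  /* channels owned by one device */

    /* Claim the first free channel for the device, or -1 if all are busy. */
    static int claim_dma(struct ctrl *c, struct udev *d)
    {
        for (int i = 0; i < NUM_DMA_CH; i++) {
            if ((c->dma_map & (1 << i)) != 0)
                continue;                    /* channel already taken */
            c->dma_map |= 1 << i;
            d->dma_map |= 1 << i;
            return i;
        }
        return -1;
    }

    /* Release everything the device held, as disable_r8a66597_pipe_all()
     * does with the two masks above. */
    static void release_all(struct ctrl *c, struct udev *d)
    {
        c->dma_map &= ~(d->dma_map);
        d->dma_map = 0;
    }

    int main(void)
    {
        struct ctrl c = { 0 };
        struct udev d = { 0 };

        printf("claimed channel %d\n", claim_dma(&c, &d));
        printf("claimed channel %d\n", claim_dma(&c, &d));
        printf("claimed channel %d (expect -1)\n", claim_dma(&c, &d));
        release_all(&c, &d);
        printf("after release, mask=0x%x\n", c.dma_map);
        return 0;
    }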
|
/linux/drivers/net/ethernet/qlogic/qed/

  qed_chain.c
    120  entry->dma_map);  in qed_chain_free_pbl()
    305  addr_tbl[i].dma_map = phys;  in qed_chain_alloc_pbl()
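
In qed's chains the PBL address table pairs each page's CPU pointer with its DMA address, so the free path can hand both back to dma_free_coherent(). A hedged kernel-context sketch of filling such a table; qed's page linking and the PBL the device reads are omitted:

    #include <linux/dma-mapping.h>

    struct addr_tbl_entry {
        void *virt_addr;     /* CPU view of the chain page              */
        dma_addr_t dma_map;  /* device view, stored for the free path   */
    };

    /* Allocate n chain pages and record both addresses for each. */
    static int alloc_chain_pages(struct device *dev,
                                 struct addr_tbl_entry *tbl,
                                 unsigned int n, size_t page_sz)
    {
        for (unsigned int i = 0; i < n; i++) {
            dma_addr_t phys;
            void *virt = dma_alloc_coherent(dev, page_sz, &phys, GFP_KERNEL);

            if (!virt)
                return -ENOMEM;      /* caller unwinds tbl[0..i) */
            tbl[i].virt_addr = virt;
            tbl[i].dma_map = phys;
        }
        return 0;
    }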
|
/linux/drivers/infiniband/hw/bng_re/

  bng_res.c
    18  stats->dma, stats->dma_map);  in bng_re_free_stats_ctx_mem()
    32  &stats->dma_map, GFP_KERNEL);  in bng_re_alloc_stats_ctx_mem()
|
  bng_res.h
    129  dma_addr_t dma_map;  member
|
  bng_dev.c
    220  req.stats_dma_addr = cpu_to_le64(stats->dma_map);  in bng_re_stats_ctx_alloc()
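
The three bng_re hits (and the identical bnxt_re one further down) are one lifecycle: dma_alloc_coherent() fills stats->dma_map with the device-visible address, the firmware request carries it as a little-endian quadword, and the free path returns both halves together. A kernel-context sketch of that pattern, with the struct and names simplified:

    #include <linux/dma-mapping.h>

    struct stats_ctx {
        void *dma;           /* CPU pointer, as in stats->dma           */
        dma_addr_t dma_map;  /* device address handed to the firmware   */
    };

    static int stats_ctx_alloc(struct device *dev, struct stats_ctx *stats,
                               size_t size)
    {
        stats->dma = dma_alloc_coherent(dev, size, &stats->dma_map,
                                        GFP_KERNEL);
        if (!stats->dma)
            return -ENOMEM;
        /* a firmware request would now carry cpu_to_le64(stats->dma_map) */
        return 0;
    }

    static void stats_ctx_free(struct device *dev, struct stats_ctx *stats,
                               size_t size)
    {
        dma_free_coherent(dev, size, stats->dma, stats->dma_map);
    }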
|
/linux/include/net/page_pool/

  types.h
    173  bool dma_map:1;  /* Perform DMA mapping */  member
|
/linux/drivers/net/vmxnet3/

  vmxnet3_xdp.c
    117  struct vmxnet3_tx_queue *tq, bool dma_map)  vmxnet3_xdp_xmit_frame() argument
    143  if (dma_map) {  /* ndo_xdp_xmit */  in vmxnet3_xdp_xmit_frame()
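
That `bool dma_map` argument, which reappears verbatim in the mediatek and stmmac hits below, encodes where the frame's memory came from: ndo_xdp_xmit() hands the driver a foreign frame that needs a fresh mapping, while XDP_TX recycles an RX page the page_pool has already mapped. A hedged kernel-context sketch of the split, modeled on mtk_xdp_frame_map() with the names simplified:

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>
    #include <net/page_pool/helpers.h>
    #include <net/xdp.h>

    static int xdp_tx_map(struct device *dev, struct xdp_frame *xdpf,
                          bool dma_map, dma_addr_t *addr)
    {
        if (dma_map) {      /* ndo_xdp_xmit(): foreign memory, map it now */
            *addr = dma_map_single(dev, xdpf->data, xdpf->len,
                                   DMA_TO_DEVICE);
            if (dma_mapping_error(dev, *addr))
                return -ENOMEM;
        } else {            /* XDP_TX: RX page, page_pool mapped it already */
            struct page *page = virt_to_page(xdpf->data);

            *addr = page_pool_get_dma_addr(page) + sizeof(*xdpf) +
                    xdpf->headroom;
        }
        return 0;
    }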
|
/linux/include/trace/events/

  dma.h
    37  DECLARE_EVENT_CLASS(dma_map,
    70  DEFINE_EVENT(dma_map, name, \
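
This hit is tracepoint plumbing: DECLARE_EVENT_CLASS defines the record layout once, then each DEFINE_EVENT stamps out a concrete trace_<name>() event that reuses it. The same idiom with a hypothetical, trimmed-down event; the usual trace-header boilerplate (TRACE_INCLUDE_FILE, define_trace.h) is omitted:

    #undef TRACE_SYSTEM
    #define TRACE_SYSTEM demo

    #include <linux/tracepoint.h>

    DECLARE_EVENT_CLASS(demo_map,
        TP_PROTO(const void *vaddr, u64 daddr, size_t size),
        TP_ARGS(vaddr, daddr, size),

        TP_STRUCT__entry(
            __field(const void *, vaddr)
            __field(u64, daddr)
            __field(size_t, size)
        ),

        TP_fast_assign(
            __entry->vaddr = vaddr;
            __entry->daddr = daddr;
            __entry->size = size;
        ),

        TP_printk("vaddr=%p daddr=0x%llx size=%zu",
                  __entry->vaddr, (unsigned long long)__entry->daddr,
                  __entry->size)
    );

    /* One concrete event sharing the class's layout and format string. */
    DEFINE_EVENT(demo_map, demo_map_single,
        TP_PROTO(const void *vaddr, u64 daddr, size_t size),
        TP_ARGS(vaddr, daddr, size));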
|
/linux/include/linux/qed/

  qed_chain.h
    72  dma_addr_t dma_map;  member
|
/linux/drivers/net/ethernet/mediatek/

  mtk_eth_soc.c
    1936  void *data, u16 headroom, int index, bool dma_map)  mtk_xdp_frame_map() argument
    1942  if (dma_map) {  /* ndo_xdp_xmit */  in mtk_xdp_frame_map()
    1960  tx_buf->type = dma_map ? MTK_TYPE_XDP_NDO : MTK_TYPE_XDP_TX;  in mtk_xdp_frame_map()
    1971  struct net_device *dev, bool dma_map)  mtk_xdp_submit_frame() argument
    2010  data, xdpf->headroom, index, dma_map);  in mtk_xdp_submit_frame()
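
The MTK driver adds the bookkeeping the unmap side needs: tx_buf->type records whether the buffer was driver-mapped (MTK_TYPE_XDP_NDO, from ndo_xdp_xmit()) or page_pool-mapped (MTK_TYPE_XDP_TX). A hedged sketch of a completion path that consumes such a tag; the struct and enum names here are hypothetical, not MTK's:

    #include <linux/dma-mapping.h>

    enum tx_buf_type { TYPE_XDP_TX, TYPE_XDP_NDO };

    struct tx_buf {
        enum tx_buf_type type;
        dma_addr_t dma_addr;
        u32 len;
    };

    /* On TX completion, only frames we mapped ourselves get unmapped;
     * XDP_TX pages keep their long-lived page_pool mapping. */
    static void xdp_tx_complete(struct device *dev, struct tx_buf *tx_buf)
    {
        if (tx_buf->type == TYPE_XDP_NDO)
            dma_unmap_single(dev, tx_buf->dma_addr, tx_buf->len,
                             DMA_TO_DEVICE);
    }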
|
/linux/Documentation/virt/hyperv/

  vpci.rst
    253  operations as part of dma_map/unmap_*() calls.
|
/linux/io_uring/

  zcrx.c
    1008  if (WARN_ON_ONCE(!pp->dma_map))  in io_pp_zc_init()
|
/linux/drivers/infiniband/hw/bnxt_re/

  main.c
    1011  req.stats_dma_addr = cpu_to_le64(stats->dma_map);  in bnxt_re_net_stats_ctx_alloc()
|
/linux/drivers/atm/

  fore200e.c
    1720  …oc3_regs_dma_addr = fore200e->bus->dma_map(fore200e, regs, sizeof(struct oc3_regs), DMA_FROM_DEVIC…
|
/linux/drivers/net/ethernet/stmicro/stmmac/

  stmmac_main.c
    5030  struct xdp_frame *xdpf, bool dma_map)  stmmac_xdp_xmit_xdpf() argument
    5057  if (dma_map) {  in stmmac_xdp_xmit_xdpf()
|