Searched refs:dma_sync (Results 1 – 6 of 6) sorted by relevance
238   pool->dma_sync = true;   in page_pool_init()
286   if (!pool->dma_map || !pool->dma_sync) {   in page_pool_init()
465   if (pool->dma_sync && dma_dev_need_sync(pool->p.dev)) {   in page_pool_dma_sync_for_device()
468   if (pool->dma_sync)   in page_pool_dma_sync_for_device()
821   * If pool->dma_sync is set, we'll try to sync the DMA area for
1161  if (pool->dma_sync) {   in page_pool_scrub()
1163  pool->dma_sync = false;   in page_pool_scrub()
1166  * value of dma_sync (and thus perform a sync) have   in page_pool_scrub()
442 pool->dma_sync = false; in mp_dmabuf_devmem_init()
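Taken together, the page_pool hits show dma_sync used as a per-pool flag: set in page_pool_init() when the driver asks for device syncing, cleared by the dmabuf memory provider in mp_dmabuf_devmem_init() and by page_pool_scrub(), and checked in page_pool_dma_sync_for_device() before any sync is issued (the flag itself is the one-bit bool in the page_pool types hit further down). A minimal sketch of that gating pattern follows; the demo_* names are illustrative, not the kernel's real struct layout.

#include <linux/device.h>
#include <linux/dma-mapping.h>

struct demo_pool {
	struct device *dev;
	bool dma_sync:1;	/* perform DMA sync for device, as in the hits above */
};

static void demo_dma_sync_for_device(const struct demo_pool *pool,
				     dma_addr_t addr, size_t len)
{
	/*
	 * Skip the sync entirely when the pool opted out (e.g. a
	 * dmabuf-backed memory provider cleared dma_sync) or the device
	 * is coherent and never needs one.
	 */
	if (!pool->dma_sync || !dma_dev_need_sync(pool->dev))
		return;

	dma_sync_single_for_device(pool->dev, addr, len, DMA_FROM_DEVICE);
}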
173  atomic_t dma_sync;   member
781  atomic_dec(&pthr->dma_sync);   in perf_dma_copy_callback()
847  atomic_inc(&pthr->dma_sync);   in perf_copy_chunk()
910  atomic_set(&pthr->dma_sync, 0);   in perf_init_test()
972  (atomic_read(&pthr->dma_sync) == 0 ||   in perf_sync_test()
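In contrast, the perf_* hits use dma_sync as an atomic counter of in-flight DMA copies rather than a flag: perf_copy_chunk() increments it when a chunk is submitted, perf_dma_copy_callback() decrements it on completion, perf_init_test() zeroes it, and perf_sync_test() waits for it to drain. A hedged sketch of that accounting pattern follows; the demo_* names and the wait-queue wiring are illustrative, not the driver's actual code.

#include <linux/atomic.h>
#include <linux/wait.h>

struct demo_thread {
	atomic_t dma_sync;		/* DMA copies still in flight */
	wait_queue_head_t dma_wait;
};

static void demo_thread_init(struct demo_thread *thr)
{
	atomic_set(&thr->dma_sync, 0);
	init_waitqueue_head(&thr->dma_wait);
}

/* DMA engine completion callback: one fewer copy outstanding. */
static void demo_copy_done(void *arg)
{
	struct demo_thread *thr = arg;

	if (atomic_dec_and_test(&thr->dma_sync))
		wake_up(&thr->dma_wait);
}

static void demo_submit_chunk(struct demo_thread *thr)
{
	/* Account for the copy before handing it to the DMA engine. */
	atomic_inc(&thr->dma_sync);
	/* ... submit the descriptor with demo_copy_done as its callback ... */
}

/* Block until every submitted copy has completed. */
static int demo_wait_idle(struct demo_thread *thr)
{
	return wait_event_interruptible(thr->dma_wait,
					atomic_read(&thr->dma_sync) == 0);
}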
175 bool dma_sync:1; /* Perform DMA sync for device */ member
63  void (*dma_sync)(struct device *,   in __isp_stat_buf_sync_magic()
68  dma_sync(stat->isp->dev, buf->dma_addr, 0, MAGIC_SIZE, dir);   in __isp_stat_buf_sync_magic()
69  dma_sync(stat->isp->dev, buf->dma_addr + (buf_size & PAGE_MASK),   in __isp_stat_buf_sync_magic()
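The __isp_stat_buf_sync_magic() hits show a third shape: dma_sync is a function-pointer parameter, so one helper can sync a sub-range either towards the CPU or towards the device depending on which DMA sync routine the caller passes in (the argument pattern matches dma_sync_single_range_for_cpu()/dma_sync_single_range_for_device()). A hedged sketch of that pattern follows; DEMO_MAGIC_SIZE and the demo_* names are made up for illustration.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/mm.h>		/* PAGE_MASK */

#define DEMO_MAGIC_SIZE	16	/* size of the magic words, illustrative */

typedef void (*demo_dma_sync_fn)(struct device *, dma_addr_t, unsigned long,
				 size_t, enum dma_data_direction);

/* Sync only the magic words at the head and the tail of the buffer. */
static void demo_buf_sync_magic(struct device *dev, dma_addr_t dma_addr,
				size_t buf_size, enum dma_data_direction dir,
				demo_dma_sync_fn dma_sync)
{
	dma_sync(dev, dma_addr, 0, DEMO_MAGIC_SIZE, dir);
	dma_sync(dev, dma_addr + (buf_size & PAGE_MASK),
		 buf_size & ~PAGE_MASK, DEMO_MAGIC_SIZE, dir);
}

/* Example call sites, syncing for the CPU or for the device respectively:
 *   demo_buf_sync_magic(dev, addr, size, DMA_FROM_DEVICE,
 *                       dma_sync_single_range_for_cpu);
 *   demo_buf_sync_magic(dev, addr, size, DMA_FROM_DEVICE,
 *                       dma_sync_single_range_for_device);
 */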
64 - don't call dma_sync function on allocations from