Lines matching "entry" in the tegra-vde dma-buf cache code, grouped by function:

In tegra_vde_release_entry():

    static void tegra_vde_release_entry(struct tegra_vde_cache_entry *entry)

    struct dma_buf *dmabuf = entry->a->dmabuf;

    WARN_ON_ONCE(entry->refcnt);

    if (entry->vde->domain)
        tegra_vde_iommu_unmap(entry->vde, entry->iova);

    dma_buf_unmap_attachment_unlocked(entry->a, entry->sgt, entry->dma_dir);
    dma_buf_detach(dmabuf, entry->a);

    list_del(&entry->list);
    kfree(entry);
In tegra_vde_delayed_unmap():

    struct tegra_vde_cache_entry *entry;

    entry = container_of(work, struct tegra_vde_cache_entry,
                         dwork.work);
    vde = entry->vde;

    tegra_vde_release_entry(entry);
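Together these two functions form a delayed-release pattern: a struct delayed_work is embedded in each cache entry, and the work handler recovers its entry with container_of() and tears it down. Below is a minimal sketch of that pattern using hypothetical my_cache/my_entry types; the key field and the mutex taken around the release are assumptions about the surrounding code, not part of the listing.

#include <linux/bug.h>
#include <linux/container_of.h>
#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

/* Hypothetical stand-ins for the driver's cache and entry types. */
struct my_cache {
        struct mutex lock;              /* assumed: serializes cache operations */
        struct list_head entries;
};

struct my_entry {
        struct list_head list;          /* linked on my_cache.entries */
        struct delayed_work dwork;      /* pending release of an idle entry */
        struct my_cache *cache;
        void *key;                      /* stands in for the dma-buf lookup key */
        unsigned int refcnt;
};

/* Unlink and free one entry; the caller must hold cache->lock. */
static void my_entry_release(struct my_entry *entry)
{
        WARN_ON_ONCE(entry->refcnt);
        list_del(&entry->list);
        kfree(entry);
}

/*
 * Delayed-work handler: recover the entry from the work pointer via
 * container_of() on the embedded dwork.work, then release it under the lock.
 */
static void my_entry_delayed_release(struct work_struct *work)
{
        struct my_entry *entry = container_of(work, struct my_entry, dwork.work);
        struct my_cache *cache = entry->cache;

        mutex_lock(&cache->lock);
        my_entry_release(entry);
        mutex_unlock(&cache->lock);
}

The dwork field is assumed to be set up with INIT_DELAYED_WORK(&entry->dwork, my_entry_delayed_release) when the entry is created, mirroring the INIT_DELAYED_WORK() call in the map path below.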
In tegra_vde_dmabuf_cache_map():

    struct tegra_vde_cache_entry *entry;

    list_for_each_entry(entry, &vde->map_list, list) {
        if (entry->a->dmabuf != dmabuf)

        if (!cancel_delayed_work(&entry->dwork))

        if (entry->dma_dir != dma_dir)
            entry->dma_dir = DMA_BIDIRECTIONAL;

        *addrp = iova_dma_addr(&vde->iova, entry->iova);

        *addrp = sg_dma_address(entry->sgt->sgl);

    entry = kzalloc(sizeof(*entry), GFP_KERNEL);
    if (!entry) {

    INIT_DELAYED_WORK(&entry->dwork, tegra_vde_delayed_unmap);
    list_add(&entry->list, &vde->map_list);

    entry->dma_dir = dma_dir;
    entry->iova = iova;
    entry->vde = vde;
    entry->sgt = sgt;
    entry->a = attachment;

    entry->refcnt++;

    *ap = entry->a;

    kfree(entry);
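The hit path walks the list, matches on the dma-buf, and cancels the entry's pending delayed release before reusing it; on a direction mismatch the cached mapping is widened to DMA_BIDIRECTIONAL, and the cached IOVA or scatterlist address is returned. A hedged sketch of that lookup, continuing the hypothetical my_cache/my_entry example above (my_cache_get() and the key comparison are illustrative, not the driver's code):

/* Look up a cached entry by key and take a reference; NULL on a miss. */
static struct my_entry *my_cache_get(struct my_cache *cache, void *key)
{
        struct my_entry *entry;

        mutex_lock(&cache->lock);

        list_for_each_entry(entry, &cache->entries, list) {
                if (entry->key != key)
                        continue;

                /*
                 * Try to cancel the pending delayed release.  If nothing was
                 * cancelled (the handler already started, or no release was
                 * queued), skip this entry and fall through to a cache miss.
                 */
                if (!cancel_delayed_work(&entry->dwork))
                        continue;

                entry->refcnt++;
                mutex_unlock(&cache->lock);
                return entry;
        }

        mutex_unlock(&cache->lock);
        return NULL;    /* miss: caller allocates, maps and list_add()s a new entry */
}

On a miss the listing shows the allocation path: kzalloc() the entry, INIT_DELAYED_WORK() its dwork, add it to vde->map_list, record the mapping (dma_dir, iova, sgt, attachment) and take the first reference; kfree(entry) is the error-path cleanup.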
In tegra_vde_dmabuf_cache_unmap():

    struct tegra_vde_cache_entry *entry;

    list_for_each_entry(entry, &vde->map_list, list) {
        if (entry->a != a)

        WARN_ON_ONCE(!entry->refcnt);

        if (--entry->refcnt == 0) {
            tegra_vde_release_entry(entry);

            schedule_delayed_work(&entry->dwork, 5 * HZ);
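On the unmap side the reference count is dropped, and when it reaches zero the listing shows two outcomes in the same function: an immediate tegra_vde_release_entry() or a schedule_delayed_work() with a 5 * HZ delay, so an idle mapping lingers briefly in case it is reused. A sketch of that shape, continuing the example above; the release_now flag is a hypothetical stand-in for whatever condition the driver uses to choose between the two:

/* Drop one reference; an idle entry is released now or after a grace period. */
static void my_cache_put(struct my_cache *cache, struct my_entry *entry,
                         bool release_now)
{
        mutex_lock(&cache->lock);

        WARN_ON_ONCE(!entry->refcnt);

        if (--entry->refcnt == 0) {
                if (release_now) {
                        /* tear the entry down immediately */
                        my_entry_release(entry);
                } else {
                        /* keep it cached for a while in case it is mapped again */
                        schedule_delayed_work(&entry->dwork, 5 * HZ);
                }
        }

        mutex_unlock(&cache->lock);
}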
In tegra_vde_dmabuf_cache_unmap_sync():

    struct tegra_vde_cache_entry *entry, *tmp;

    list_for_each_entry_safe(entry, tmp, &vde->map_list, list) {
        if (entry->refcnt)

        if (!cancel_delayed_work(&entry->dwork))

        tegra_vde_release_entry(entry);
In tegra_vde_dmabuf_cache_unmap_all():

    struct tegra_vde_cache_entry *entry, *tmp;

    list_for_each_entry_safe(entry, tmp, &vde->map_list, list) {
        if (!cancel_delayed_work(&entry->dwork))

        tegra_vde_release_entry(entry);
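The two teardown loops differ only in the refcnt check: the _sync variant skips entries that are still referenced, while the _all variant does not, and in both an entry whose pending release cannot be cancelled is left for the already-running handler to free. A hedged sketch of the _sync-style flush, again using the hypothetical my_cache/my_entry types:

/* Flush idle entries; anything still referenced stays cached. */
static void my_cache_flush_idle(struct my_cache *cache)
{
        struct my_entry *entry, *tmp;

        mutex_lock(&cache->lock);

        /* _safe iteration: my_entry_release() unlinks entries as we go */
        list_for_each_entry_safe(entry, tmp, &cache->entries, list) {
                /* still in use: leave it alone */
                if (entry->refcnt)
                        continue;

                /* nothing cancelled: the delayed handler is already freeing it */
                if (!cancel_delayed_work(&entry->dwork))
                        continue;

                my_entry_release(entry);
        }

        mutex_unlock(&cache->lock);
}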