Lines Matching full:heap
16 * The GPU heap context is an opaque structure used by the GPU to track the
17 * heap allocations. The driver should only touch it to initialize it (zero all fields).
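Because the context is opaque to the driver, initialization boils down to zeroing the per-heap slot inside the gpu_contexts buffer. A minimal sketch of that step, assuming hypothetical helpers panthor_get_heap_ctx() (CPU address of a context slot) and panthor_heap_ctx_stride() (per-context size expected by the FW), and relying on the driver's internal headers:

	/* Sketch only: the two helpers are assumptions; the driver never
	 * interprets the context contents beyond zeroing them.
	 */
	static void init_heap_ctx_slot(struct panthor_heap_pool *pool, u32 id)
	{
		void *heap_ctx = panthor_get_heap_ctx(pool, id);

		memset(heap_ctx, 0, panthor_heap_ctx_stride(pool->ptdev));
	}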
24 * struct panthor_heap_chunk_header - Heap chunk header
28 * @next: Next heap chunk in the list.
39 * struct panthor_heap_chunk - Structure used to keep track of allocated heap chunks.
42 /** @node: Used to insert the heap chunk in panthor_heap::chunks. */
45 /** @bo: Buffer object backing the heap chunk. */
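Pieced together from the matched lines, the two chunk-related structures look roughly as follows; the exact field types, and in particular the padding after @next, are assumptions:

	/* In-memory header the GPU expects at the start of each chunk; @next is
	 * a GPU VA chaining chunks together.
	 */
	struct panthor_heap_chunk_header {
		/** @next: Next heap chunk in the list. */
		u64 next;

		/** @unknown: Assumed must-be-zero padding consumed by the FW/GPU. */
		u32 unknown[14];
	};

	/* Driver-side bookkeeping for one allocated chunk. */
	struct panthor_heap_chunk {
		/** @node: Used to insert the heap chunk in panthor_heap::chunks. */
		struct list_head node;

		/** @bo: Buffer object backing the heap chunk. */
		struct panthor_kernel_bo *bo;
	};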
50 * struct panthor_heap - Structure used to manage tiler heap contexts.
53 /** @chunks: List containing all heap chunks allocated so far. */
71 /** @chunk_count: Number of heap chunks currently allocated. */
78 * struct panthor_heap_pool - Pool of heap contexts
98 /** @gpu_contexts: Buffer object containing the GPU heap contexts. */
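The matched fields suggest the following shape for the heap and pool descriptors; field order, the lock types, and the refcount member are assumptions made for the sketch:

	struct panthor_heap {
		/** @chunks: List containing all heap chunks allocated so far. */
		struct list_head chunks;

		/** @lock: Assumed mutex protecting insertion/removal in @chunks. */
		struct mutex lock;

		/** @chunk_size: Size of each chunk, in bytes. */
		u32 chunk_size;

		/** @max_chunks: Maximum number of chunks the heap may own. */
		u32 max_chunks;

		/** @target_in_flight: Render-pass count above which growth is refused. */
		u32 target_in_flight;

		/** @chunk_count: Number of heap chunks currently allocated. */
		u32 chunk_count;
	};

	struct panthor_heap_pool {
		/** @refcount: Assumed kref backing panthor_heap_pool_get()/put(). */
		struct kref refcount;

		/** @ptdev: Device owning the pool. */
		struct panthor_device *ptdev;

		/** @vm: VM the pool is attached to (weak ref, see pool creation below). */
		struct panthor_vm *vm;

		/** @lock: Assumed rw-semaphore serializing destruction against lookups. */
		struct rw_semaphore lock;

		/** @xa: Assumed xarray indexing heaps by handle. */
		struct xarray xa;

		/** @gpu_contexts: Buffer object containing the GPU heap contexts. */
		struct panthor_kernel_bo *gpu_contexts;
	};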
122 struct panthor_heap *heap, in panthor_free_heap_chunk() argument
125 mutex_lock(&heap->lock); in panthor_free_heap_chunk()
127 heap->chunk_count--; in panthor_free_heap_chunk()
128 mutex_unlock(&heap->lock); in panthor_free_heap_chunk()
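Reassembled from the matched fragments, chunk teardown takes the heap lock only around the list/count update and then releases the backing BO. The list_del() call and the destroy helper's call shape are assumptions:

	static void panthor_free_heap_chunk(struct panthor_vm *vm,
					    struct panthor_heap *heap,
					    struct panthor_heap_chunk *chunk)
	{
		mutex_lock(&heap->lock);
		list_del(&chunk->node);		/* assumed: unlink under heap->lock */
		heap->chunk_count--;
		mutex_unlock(&heap->lock);

		panthor_kernel_bo_destroy(vm, chunk->bo);	/* call shape assumed */
		kfree(chunk);
	}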
136 struct panthor_heap *heap, in panthor_alloc_heap_chunk() argument
147 chunk->bo = panthor_kernel_bo_create(ptdev, vm, heap->chunk_size, in panthor_alloc_heap_chunk()
163 if (initial_chunk && !list_empty(&heap->chunks)) { in panthor_alloc_heap_chunk()
167 prev_chunk = list_first_entry(&heap->chunks, in panthor_alloc_heap_chunk()
173 (heap->chunk_size >> 12); in panthor_alloc_heap_chunk()
178 mutex_lock(&heap->lock); in panthor_alloc_heap_chunk()
179 list_add(&chunk->node, &heap->chunks); in panthor_alloc_heap_chunk()
180 heap->chunk_count++; in panthor_alloc_heap_chunk()
181 mutex_unlock(&heap->lock); in panthor_alloc_heap_chunk()
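A rough reconstruction of the allocation path: create a BO of chunk_size bytes, zero its header, optionally chain the current head chunk behind the new one by writing the header's @next field (the VA is tagged with chunk_size >> 12 in the low bits), then publish the chunk under the heap lock. The BO allocation and CPU-mapping helpers below are hypothetical stand-ins for the driver's real ones:

	static int panthor_alloc_heap_chunk(struct panthor_device *ptdev,
					    struct panthor_vm *vm,
					    struct panthor_heap *heap,
					    bool initial_chunk)
	{
		struct panthor_heap_chunk *chunk;
		struct panthor_heap_chunk_header *hdr;
		int ret;

		chunk = kmalloc(sizeof(*chunk), GFP_KERNEL);
		if (!chunk)
			return -ENOMEM;

		/* example_create_chunk_bo() stands in for the driver's kernel-BO
		 * allocator (panthor_kernel_bo_create() in the matched code), whose
		 * full flag list is not visible in this listing.
		 */
		chunk->bo = example_create_chunk_bo(ptdev, vm, heap->chunk_size);
		if (IS_ERR(chunk->bo)) {
			ret = PTR_ERR(chunk->bo);
			goto err_free_chunk;
		}

		/* Assumed: CPU mapping of the chunk so its header can be initialized. */
		hdr = example_chunk_kmap(chunk->bo);
		memset(hdr, 0, sizeof(*hdr));

		/* When pre-building the initial chunk list, chain the previous head
		 * behind the new chunk: @next encodes the previous chunk's GPU VA
		 * with its size in 4k units in the low bits. Chunks added later by
		 * panthor_heap_grow() are linked by the FW instead.
		 */
		if (initial_chunk && !list_empty(&heap->chunks)) {
			struct panthor_heap_chunk *prev_chunk =
				list_first_entry(&heap->chunks,
						 struct panthor_heap_chunk, node);

			hdr->next = panthor_kernel_bo_gpuva(prev_chunk->bo) |
				    (heap->chunk_size >> 12);
		}

		/* Publish the new chunk at the head of the list. */
		mutex_lock(&heap->lock);
		list_add(&chunk->node, &heap->chunks);
		heap->chunk_count++;
		mutex_unlock(&heap->lock);

		return 0;

	err_free_chunk:
		kfree(chunk);
		return ret;
	}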
195 struct panthor_heap *heap) in panthor_free_heap_chunks() argument
199 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) in panthor_free_heap_chunks()
200 panthor_free_heap_chunk(vm, heap, chunk); in panthor_free_heap_chunks()
205 struct panthor_heap *heap, in panthor_alloc_heap_chunks() argument
212 ret = panthor_alloc_heap_chunk(ptdev, vm, heap, true); in panthor_alloc_heap_chunks()
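The two bulk helpers are thin wrappers around the per-chunk routines; reconstructed under the assumption that a chunk-count parameter drives a simple loop:

	static void panthor_free_heap_chunks(struct panthor_vm *vm,
					     struct panthor_heap *heap)
	{
		struct panthor_heap_chunk *chunk, *tmp;

		list_for_each_entry_safe(chunk, tmp, &heap->chunks, node)
			panthor_free_heap_chunk(vm, heap, chunk);
	}

	static int panthor_alloc_heap_chunks(struct panthor_device *ptdev,
					     struct panthor_vm *vm,
					     struct panthor_heap *heap,
					     u32 chunk_count)
	{
		int ret;
		u32 i;

		for (i = 0; i < chunk_count; i++) {
			ret = panthor_alloc_heap_chunk(ptdev, vm, heap, true);
			if (ret)
				return ret;
		}

		return 0;
	}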
223 struct panthor_heap *heap; in panthor_heap_destroy_locked() local
225 heap = xa_erase(&pool->xa, handle); in panthor_heap_destroy_locked()
226 if (!heap) in panthor_heap_destroy_locked()
229 panthor_free_heap_chunks(pool->vm, heap); in panthor_heap_destroy_locked()
230 mutex_destroy(&heap->lock); in panthor_heap_destroy_locked()
231 kfree(heap); in panthor_heap_destroy_locked()
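Destroying a single heap amounts to removing it from the pool's xarray and releasing its chunks; only the signature and return convention are assumed here, the body lines all appear in the listing:

	static int panthor_heap_destroy_locked(struct panthor_heap_pool *pool,
					       u32 handle)
	{
		struct panthor_heap *heap;

		heap = xa_erase(&pool->xa, handle);
		if (!heap)
			return -EINVAL;

		panthor_free_heap_chunks(pool->vm, heap);
		mutex_destroy(&heap->lock);
		kfree(heap);
		return 0;
	}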
236 * panthor_heap_destroy() - Destroy a heap context
252 * panthor_heap_create() - Create a heap context
253 * @pool: Pool to instantiate the heap context from.
260 * @heap_ctx_gpu_va: Pointer holding the GPU address of the allocated heap context.
263 * @first_chunk_gpu_va: Pointer holding the GPU address of the first chunk assigned to the heap context.
275 struct panthor_heap *heap; in panthor_heap_create() local
295 /* The pool has been destroyed; we can't create a new heap. */ in panthor_heap_create()
299 heap = kzalloc(sizeof(*heap), GFP_KERNEL); in panthor_heap_create()
300 if (!heap) { in panthor_heap_create()
305 mutex_init(&heap->lock); in panthor_heap_create()
306 INIT_LIST_HEAD(&heap->chunks); in panthor_heap_create()
307 heap->chunk_size = chunk_size; in panthor_heap_create()
308 heap->max_chunks = max_chunks; in panthor_heap_create()
309 heap->target_in_flight = target_in_flight; in panthor_heap_create()
311 ret = panthor_alloc_heap_chunks(pool->ptdev, vm, heap, in panthor_heap_create()
316 first_chunk = list_first_entry(&heap->chunks, in panthor_heap_create()
322 /* The pool has been destroyed; we can't create a new heap. */ in panthor_heap_create()
326 ret = xa_alloc(&pool->xa, &id, heap, in panthor_heap_create()
345 panthor_free_heap_chunks(pool->vm, heap); in panthor_heap_create()
346 mutex_destroy(&heap->lock); in panthor_heap_create()
347 kfree(heap); in panthor_heap_create()
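For reference, a caller-side sketch of the create API implied by the kernel-doc above. The prototype is inferred from the documented parameters and the values are illustrative only; the real caller sits in the driver's ioctl/scheduler path:

	/* Hypothetical caller; pool is assumed to come from the requesting
	 * client's VM.
	 */
	static int example_create_tiler_heap(struct panthor_heap_pool *pool,
					     u64 *ctx_va, u64 *first_chunk_va)
	{
		int handle;

		handle = panthor_heap_create(pool,
					     1,		/* initial_chunk_count */
					     SZ_2M,	/* chunk_size */
					     64,	/* max_chunks */
					     16,	/* target_in_flight */
					     ctx_va,
					     first_chunk_va);
		if (handle < 0)
			return handle;

		/* A positive handle identifies the heap for later grow/destroy calls. */
		return handle;
	}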
355 * panthor_heap_return_chunk() - Return an unused heap chunk
356 * @pool: The pool this heap belongs to.
357 * @heap_gpu_va: The GPU address of the heap context.
361 * couldn't be linked to the heap context through the FW interface because
371 struct panthor_heap *heap; in panthor_heap_return_chunk() local
378 heap = xa_load(&pool->xa, heap_id); in panthor_heap_return_chunk()
379 if (!heap) { in panthor_heap_return_chunk()
386 mutex_lock(&heap->lock); in panthor_heap_return_chunk()
387 list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) { in panthor_heap_return_chunk()
391 heap->chunk_count--; in panthor_heap_return_chunk()
395 mutex_unlock(&heap->lock); in panthor_heap_return_chunk()
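The matched body lines show the core of the return path: look the heap up by its handle, then walk the chunk list under heap->lock and unlink the chunk whose BO matches the given GPU VA. A sketch of that inner step, with the match criterion, error code, and BO-destroy call shape assumed:

	/* Sketch: unlink and free the chunk backing @chunk_gpu_va. */
	static int example_remove_chunk(struct panthor_heap_pool *pool,
					struct panthor_heap *heap,
					u64 chunk_gpu_va)
	{
		struct panthor_heap_chunk *chunk, *tmp, *removed = NULL;

		mutex_lock(&heap->lock);
		list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) {
			/* Assumed: match on the (page-aligned) GPU VA of the chunk BO. */
			if (panthor_kernel_bo_gpuva(chunk->bo) == chunk_gpu_va) {
				list_del(&chunk->node);
				heap->chunk_count--;
				removed = chunk;
				break;
			}
		}
		mutex_unlock(&heap->lock);

		if (!removed)
			return -EINVAL;	/* assumed error for unknown chunks */

		panthor_kernel_bo_destroy(pool->vm, removed->bo);	/* call shape assumed */
		kfree(removed);
		return 0;
	}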
411 * panthor_heap_grow() - Make a heap context grow.
412 * @pool: The pool this heap belongs to.
413 * @heap_gpu_va: The GPU address of the heap context.
419 * - 0 if a new heap chunk was allocated
434 struct panthor_heap *heap; in panthor_heap_grow() local
441 heap = xa_load(&pool->xa, heap_id); in panthor_heap_grow()
442 if (!heap) { in panthor_heap_grow()
452 if (renderpasses_in_flight > heap->target_in_flight || in panthor_heap_grow()
453 heap->chunk_count >= heap->max_chunks) { in panthor_heap_grow()
469 ret = panthor_alloc_heap_chunk(pool->ptdev, pool->vm, heap, false); in panthor_heap_grow()
473 chunk = list_first_entry(&heap->chunks, in panthor_heap_grow()
477 (heap->chunk_size >> 12); in panthor_heap_grow()
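Putting the matched lines together, growth is refused when more render passes are in flight than the target, or when the chunk budget is exhausted; otherwise a non-initial chunk is allocated and its tagged VA (address | chunk_size >> 12) is handed back for the FW to link into the heap. A sketch of that decision, with the locking elided and the refusal error code assumed:

	static int example_try_grow(struct panthor_heap_pool *pool,
				    struct panthor_heap *heap,
				    u32 renderpasses_in_flight,
				    u64 *new_chunk_gpu_va)
	{
		struct panthor_heap_chunk *chunk;
		int ret;

		/* Let the FW wait for in-flight fragment jobs instead of
		 * allocating more memory (error code assumed).
		 */
		if (renderpasses_in_flight > heap->target_in_flight ||
		    heap->chunk_count >= heap->max_chunks)
			return -EBUSY;

		ret = panthor_alloc_heap_chunk(pool->ptdev, pool->vm, heap, false);
		if (ret)
			return ret;

		/* New chunks sit at the head of heap->chunks; hand back the GPU VA
		 * with the chunk size (in 4k units) encoded in the low bits.
		 */
		chunk = list_first_entry(&heap->chunks,
					 struct panthor_heap_chunk, node);
		*new_chunk_gpu_va = panthor_kernel_bo_gpuva(chunk->bo) |
				    (heap->chunk_size >> 12);
		return 0;
	}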
495 * panthor_heap_pool_put() - Release a heap pool reference
505 * panthor_heap_pool_get() - Get a heap pool reference
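The get/put pair is presumably a plain kref scheme; a minimal sketch, assuming a @refcount member and a release callback (panthor_heap_pool_release() here is an assumed name) that frees the pool once the last reference is dropped:

	struct panthor_heap_pool *
	panthor_heap_pool_get(struct panthor_heap_pool *pool)
	{
		if (pool)
			kref_get(&pool->refcount);
		return pool;
	}

	void panthor_heap_pool_put(struct panthor_heap_pool *pool)
	{
		if (pool)
			kref_put(&pool->refcount, panthor_heap_pool_release);
	}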
520 * panthor_heap_pool_create() - Create a heap pool
522 * @vm: The VM this heap pool will be attached to.
524 * Heap pools might contain up to 128 heap contexts, and are per-VM.
541 /* We want a weak ref here: the heap pool belongs to the VM, so we're in panthor_heap_pool_create()
542 * sure that, as long as the heap pool exists, the VM exists too. in panthor_heap_pool_create()
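A sketch of pool creation consistent with the weak-reference comment above: the pool stores @vm without taking a reference, prepares an xarray for up to 128 heap handles, and allocates the BO that backs the GPU heap contexts. The helper, flags, and per-context stride below are assumptions:

	#define EXAMPLE_MAX_HEAPS_PER_POOL	128	/* per the kernel-doc above */
	#define EXAMPLE_HEAP_CTX_STRIDE		64	/* placeholder per-context size */

	struct panthor_heap_pool *
	example_heap_pool_create(struct panthor_device *ptdev, struct panthor_vm *vm)
	{
		struct panthor_heap_pool *pool;

		pool = kzalloc(sizeof(*pool), GFP_KERNEL);
		if (!pool)
			return ERR_PTR(-ENOMEM);

		kref_init(&pool->refcount);
		init_rwsem(&pool->lock);
		xa_init_flags(&pool->xa, XA_FLAGS_ALLOC);
		pool->ptdev = ptdev;
		pool->vm = vm;	/* weak ref: the VM outlives the pool by construction */

		/* example_alloc_ctx_bo() is a hypothetical stand-in for the driver's
		 * kernel-BO allocator; one GPU context slot per possible heap.
		 */
		pool->gpu_contexts = example_alloc_ctx_bo(ptdev, vm,
							  EXAMPLE_MAX_HEAPS_PER_POOL *
							  EXAMPLE_HEAP_CTX_STRIDE);
		if (IS_ERR(pool->gpu_contexts)) {
			int err = PTR_ERR(pool->gpu_contexts);

			kfree(pool);
			return ERR_PTR(err);
		}

		return pool;
	}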
571 * panthor_heap_pool_destroy() - Destroy a heap pool.
574 * This function destroys all heap contexts and their resources, thus
575 * preventing any use of the heap contexts or the chunks attached to them after that point.
578 * If the GPU still has access to some heap contexts, a fault should be triggered the next time they are accessed.
582 * The heap pool object is only released when all references to this pool are released.
587 struct panthor_heap *heap; in panthor_heap_pool_destroy() local
594 xa_for_each(&pool->xa, i, heap) in panthor_heap_pool_destroy()
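The destroy path iterates the xarray and tears each heap down in place, consistent with the matched xa_for_each() line; the pool-wide locking and the warning on failure are assumptions:

	void example_heap_pool_destroy(struct panthor_heap_pool *pool)
	{
		struct panthor_heap *heap;
		unsigned long i;

		if (!pool)
			return;

		/* Assumed: a pool-wide lock serializes destruction against the
		 * create/grow/return paths.
		 */
		down_write(&pool->lock);
		xa_for_each(&pool->xa, i, heap)
			drm_WARN_ON(&pool->ptdev->base,
				    panthor_heap_destroy_locked(pool, i));
		up_write(&pool->lock);

		/* Drop one reference; the pool is freed once every
		 * panthor_heap_pool_get() has been balanced by a put.
		 */
		panthor_heap_pool_put(pool);
	}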