Lines matching "heap" in drivers/gpu/drm/panthor/panthor_heap.c

 * The GPU heap context is an opaque structure used by the GPU to track the
 * heap allocations. The driver should only touch it to initialize it (zero all
 * the fields).
 * struct panthor_heap_chunk_header - Heap chunk header
 * @next: Next heap chunk in the list.
 * struct panthor_heap_chunk - Structure used to keep track of allocated heap chunks.
/** @node: Used to insert the heap chunk in panthor_heap::chunks. */
/** @bo: Buffer object backing the heap chunk. */
 * struct panthor_heap - Structure used to manage tiler heap contexts.
/** @chunks: List containing all heap chunks allocated so far. */
/** @chunk_count: Number of heap chunks currently allocated. */
 * struct panthor_heap_pool - Pool of heap contexts
/** @gpu_contexts: Buffer object containing the GPU heap contexts. */
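
Taken together, these matches outline a three-level layout: a pool holds up
to 128 heap contexts, and each heap context owns a list of chunks. A
condensed sketch reconstructed from the fragments in this listing (field
order and exact types are assumptions; every member shown comes from a match
elsewhere in this file):

struct panthor_heap_chunk {
        struct list_head node;           /* links into panthor_heap::chunks */
        struct panthor_kernel_bo *bo;    /* buffer object backing the chunk */
};

struct panthor_heap {
        struct list_head chunks;         /* all chunks allocated so far */
        struct mutex lock;               /* protects the chunks list */
        u32 chunk_size;
        u32 max_chunks;
        u32 target_in_flight;
        u32 chunk_count;                 /* chunks currently allocated */
};

struct panthor_heap_pool {
        struct xarray xa;                /* handle -> struct panthor_heap */
        struct panthor_kernel_bo *gpu_contexts; /* GPU heap contexts */
        atomic_t size;                   /* chunk bytes across all heaps */
};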
In panthor_free_heap_chunk():
        struct panthor_heap *heap,
        mutex_lock(&heap->lock);
        heap->chunk_count--;
        mutex_unlock(&heap->lock);
        atomic_sub(heap->chunk_size, &pool->size);
In panthor_alloc_heap_chunk():
        struct panthor_heap *heap,
        chunk->bo = panthor_kernel_bo_create(pool->ptdev, pool->vm, heap->chunk_size,
        if (initial_chunk && !list_empty(&heap->chunks)) {
        prev_chunk = list_first_entry(&heap->chunks,
        (heap->chunk_size >> 12);
        mutex_lock(&heap->lock);
        list_add(&chunk->node, &heap->chunks);
        heap->chunk_count++;
        mutex_unlock(&heap->lock);
        atomic_add(heap->chunk_size, &pool->size);
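
The "(heap->chunk_size >> 12)" match suggests how chunks are chained: the
header's @next field packs the previous chunk's page-aligned GPU VA with the
chunk size expressed in 4 KiB units. A hedged sketch; hdr is assumed to point
at the new chunk's CPU-mapped header, and panthor_kernel_bo_gpuva() is
assumed to return a kernel BO's GPU VA:

        u64 prev_gpuva = panthor_kernel_bo_gpuva(prev_chunk->bo);

        /* The VA lives in bits 63:12 (chunks are page-aligned), the size
         * in 4 KiB units in the low bits, so one 64-bit word describes
         * the next chunk.
         */
        hdr->next = (prev_gpuva & GENMASK_ULL(63, 12)) |
                    (heap->chunk_size >> 12);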
In panthor_free_heap_chunks():
        struct panthor_heap *heap)
        list_for_each_entry_safe(chunk, tmp, &heap->chunks, node)
        panthor_free_heap_chunk(pool, heap, chunk);
In panthor_alloc_heap_chunks():
        struct panthor_heap *heap,
        ret = panthor_alloc_heap_chunk(pool, heap, true);
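
Only the call into panthor_alloc_heap_chunk() is matched here; a plausible
reconstruction of the surrounding helper, assuming it simply loops over the
requested count (the exact error handling is a guess):

static int panthor_alloc_heap_chunks(struct panthor_heap_pool *pool,
                                     struct panthor_heap *heap,
                                     u32 chunk_count)
{
        int ret;
        u32 i;

        /* Initial chunks (true) get chained to the previous one. */
        for (i = 0; i < chunk_count; i++) {
                ret = panthor_alloc_heap_chunk(pool, heap, true);
                if (ret)
                        return ret;
        }

        return 0;
}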
In panthor_heap_destroy_locked():
        struct panthor_heap *heap;
        heap = xa_erase(&pool->xa, handle);
        if (!heap)
        panthor_free_heap_chunks(pool, heap);
        mutex_destroy(&heap->lock);
        kfree(heap);
 * panthor_heap_destroy() - Destroy a heap context
 * panthor_heap_create() - Create a heap context
 * @pool: Pool to instantiate the heap context from.
 * @heap_ctx_gpu_va: Pointer holding the GPU address of the allocated heap
 *   context.
 * @first_chunk_gpu_va: Pointer holding the GPU address of the first chunk
 *   assigned to the heap context.
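
For orientation, a hedged usage sketch of the creation path. The parameter
order is an assumption pieced together from the doc lines above and the
field assignments below (initial_chunk_count, chunk_size, max_chunks and
target_in_flight all appear there); the positive-handle return convention is
implied by the xa_alloc() match:

        u64 heap_ctx_gpu_va, first_chunk_gpu_va;
        int handle;

        /* Hypothetical values: one initial 2 MiB chunk, at most 32 chunks. */
        handle = panthor_heap_create(pool, 1, SZ_2M, 32, target_in_flight,
                                     &heap_ctx_gpu_va, &first_chunk_gpu_va);
        if (handle < 0)
                return handle;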
In panthor_heap_create():
        struct panthor_heap *heap;
        /* The pool has been destroyed, we can't create a new heap. */
        heap = kzalloc(sizeof(*heap), GFP_KERNEL);
        if (!heap) {
        mutex_init(&heap->lock);
        INIT_LIST_HEAD(&heap->chunks);
        heap->chunk_size = chunk_size;
        heap->max_chunks = max_chunks;
        heap->target_in_flight = target_in_flight;
        ret = panthor_alloc_heap_chunks(pool, heap, initial_chunk_count);
        first_chunk = list_first_entry(&heap->chunks,
        /* The pool has been destroyed, we can't create a new heap. */
        ret = xa_alloc(&pool->xa, &id, heap,
        panthor_free_heap_chunks(pool, heap);
        mutex_destroy(&heap->lock);
        kfree(heap);
 * panthor_heap_return_chunk() - Return an unused heap chunk
 * @pool: The pool this heap belongs to.
 * @heap_gpu_va: The GPU address of the heap context.
 * This function is used when a chunk allocated with panthor_heap_grow()
 * couldn't be linked to the heap context through the FW interface because
 * the group requesting the allocation was scheduled out in the meantime.
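
A minimal sketch of the rollback this comment describes: the chunk was
allocated for a grow request, the FW handoff never happened, so the caller
gives the chunk back. The third parameter name, chunk_gpu_va, is an
assumption; only @pool and @heap_gpu_va are confirmed by the doc lines:

        /* chunk_gpu_va is the VA obtained from the earlier grow request. */
        int ret = panthor_heap_return_chunk(pool, heap_gpu_va, chunk_gpu_va);

        if (ret)
                return ret;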
In panthor_heap_return_chunk():
        struct panthor_heap *heap;
        heap = xa_load(&pool->xa, heap_id);
        if (!heap) {
        mutex_lock(&heap->lock);
        list_for_each_entry_safe(chunk, tmp, &heap->chunks, node) {
        heap->chunk_count--;
        atomic_sub(heap->chunk_size, &pool->size);
        mutex_unlock(&heap->lock);
 * panthor_heap_grow() - Make a heap context grow.
 * @pool: The pool this heap belongs to.
 * @heap_gpu_va: The GPU address of the heap context.
 * Return:
 * - 0 if a new heap chunk was allocated
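
A hedged caller-side sketch; renderpasses_in_flight comes from the match
below, while pending_frag_count and the output-pointer parameter are
assumptions:

        u64 new_chunk_gpu_va;
        int ret;

        ret = panthor_heap_grow(pool, heap_gpu_va, renderpasses_in_flight,
                                pending_frag_count, &new_chunk_gpu_va);
        if (ret)
                /* Either the in-flight/chunk-count limits were hit or the
                 * allocation failed; the FW is expected to wait for fragment
                 * jobs to complete instead of getting a new chunk.
                 */
                return ret;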
In panthor_heap_grow():
        struct panthor_heap *heap;
        heap = xa_load(&pool->xa, heap_id);
        if (!heap) {
        if (renderpasses_in_flight > heap->target_in_flight ||
            heap->chunk_count >= heap->max_chunks) {
        ret = panthor_alloc_heap_chunk(pool, heap, false);
        chunk = list_first_entry(&heap->chunks,
        (heap->chunk_size >> 12);
 * panthor_heap_pool_put() - Release a heap pool reference
 * panthor_heap_pool_get() - Get a heap pool reference
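
The pool is reference-counted; a minimal pairing sketch, assuming
panthor_heap_pool_get() returns its argument kref-style:

        struct panthor_heap_pool *ref = panthor_heap_pool_get(pool);

        /* ... use the pool ... */

        panthor_heap_pool_put(ref);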
 * panthor_heap_pool_create() - Create a heap pool
 * @vm: The VM this heap pool will be attached to.
 * Heap pools might contain up to 128 heap contexts, and are per-VM.
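
A hedged creation sketch; the device argument (ptdev) and the ERR_PTR return
convention are assumptions, only @vm is confirmed by the doc line above:

        struct panthor_heap_pool *pool;

        pool = panthor_heap_pool_create(ptdev, vm);
        if (IS_ERR(pool))
                return PTR_ERR(pool);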
In panthor_heap_pool_create():
        /* We want a weak ref here: the heap pool belongs to the VM, so we're
         * sure that, as long as the heap pool exists, the VM exists too.
         */
 * panthor_heap_pool_destroy() - Destroy a heap pool.
 * This function destroys all heap contexts and their resources, thus
 * preventing any use of the heap contexts or the chunks attached to them
 * after that point.
 * If the GPU still has access to some heap contexts, a fault should be
 * triggered, flagging the command stream groups that use them as faulty.
 * The heap pool object is only released when all references to this pool
 * are released.
In panthor_heap_pool_destroy():
        struct panthor_heap *heap;
        xa_for_each(&pool->xa, i, heap)
 * panthor_heap_pool_size() - Get a heap pool's total size
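
Given the atomic_add()/atomic_sub() updates to pool->size in the alloc/free
paths above, the accessor is presumably a single atomic read; a sketch with
an assumed return type:

        size_t panthor_heap_pool_size(struct panthor_heap_pool *pool)
        {
                /* pool->size tracks chunk bytes across all heaps in the pool. */
                return atomic_read(&pool->size);
        }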