
Searched refs:heap (Results 1 – 25 of 92) sorted by relevance


/linux/drivers/gpu/drm/panthor/
panthor_heap.c
122 struct panthor_heap *heap, in panthor_free_heap_chunk() argument
125 mutex_lock(&heap->lock); in panthor_free_heap_chunk()
127 heap->chunk_count--; in panthor_free_heap_chunk()
128 mutex_unlock(&heap->lock); in panthor_free_heap_chunk()
136 struct panthor_heap *heap, in panthor_alloc_heap_chunk() argument
147 chunk->bo = panthor_kernel_bo_create(ptdev, vm, heap->chunk_size, in panthor_alloc_heap_chunk()
163 if (initial_chunk && !list_empty(&heap->chunks)) { in panthor_alloc_heap_chunk()
167 prev_chunk = list_first_entry(&heap->chunks, in panthor_alloc_heap_chunk()
173 (heap->chunk_size >> 12); in panthor_alloc_heap_chunk()
178 mutex_lock(&heap->lock); in panthor_alloc_heap_chunk()
[all …]
/linux/lib/
min_heap.c
5 void __min_heap_init(min_heap_char *heap, void *data, int size) in __min_heap_init() argument
7 __min_heap_init_inline(heap, data, size); in __min_heap_init()
11 void *__min_heap_peek(struct min_heap_char *heap) in __min_heap_peek() argument
13 return __min_heap_peek_inline(heap); in __min_heap_peek()
17 bool __min_heap_full(min_heap_char *heap) in __min_heap_full() argument
19 return __min_heap_full_inline(heap); in __min_heap_full()
23 void __min_heap_sift_down(min_heap_char *heap, int pos, size_t elem_size, in __min_heap_sift_down() argument
26 __min_heap_sift_down_inline(heap, pos, elem_size, func, args); in __min_heap_sift_down()
30 void __min_heap_sift_up(min_heap_char *heap, size_t elem_size, size_t idx, in __min_heap_sift_up() argument
33 __min_heap_sift_up_inline(heap, elem_size, idx, func, args); in __min_heap_sift_up()
[all …]
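
These out-of-line wrappers simply call the _inline variants from include/linux/min_heap.h, which is the same API the bcache entries further down drive. A minimal usage sketch, assuming the DEFINE_MIN_HEAP()/min_heap_callbacks interface from that header; the element type, demo_* names, and values are illustrative:

	#include <linux/kernel.h>
	#include <linux/min_heap.h>
	#include <linux/printk.h>

	DEFINE_MIN_HEAP(int, min_heap_int);	/* heap of ints over a caller-supplied array */

	static bool demo_less(const void *lhs, const void *rhs, void *args)
	{
		return *(const int *)lhs < *(const int *)rhs;
	}

	static const struct min_heap_callbacks demo_cbs = {
		.less = demo_less,
		.swp  = NULL,		/* NULL is taken to select the default swap */
	};

	static void min_heap_demo(void)
	{
		int backing[4];
		int vals[] = { 3, 1, 2 };
		struct min_heap_int heap;
		int i;

		min_heap_init(&heap, backing, ARRAY_SIZE(backing));
		for (i = 0; i < ARRAY_SIZE(vals); i++)
			min_heap_push(&heap, &vals[i], &demo_cbs, NULL);

		while (heap.nr) {	/* the root is always the minimum: 1, 2, 3 */
			pr_info("%d\n", *min_heap_peek(&heap));
			min_heap_pop(&heap, &demo_cbs, NULL);
		}
	}
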
/linux/drivers/dma-buf/
dma-heap.c
51 static int dma_heap_buffer_alloc(struct dma_heap *heap, size_t len, in dma_heap_buffer_alloc() argument
66 dmabuf = heap->ops->allocate(heap, len, fd_flags, heap_flags); in dma_heap_buffer_alloc()
80 struct dma_heap *heap; in dma_heap_open() local
82 heap = xa_load(&dma_heap_minors, iminor(inode)); in dma_heap_open()
83 if (!heap) { in dma_heap_open()
89 file->private_data = heap; in dma_heap_open()
98 struct dma_heap *heap = file->private_data; in dma_heap_ioctl_allocate() local
110 fd = dma_heap_buffer_alloc(heap, heap_allocation->len, in dma_heap_ioctl_allocate()
201 void *dma_heap_get_drvdata(struct dma_heap *heap) in dma_heap_get_drvdata() argument
203 return heap->priv; in dma_heap_get_drvdata()
[all …]
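
dma_heap_buffer_alloc() above is the kernel half of the DMA_HEAP_IOCTL_ALLOC path: the heap's allocate() op builds a dma-buf and the resulting fd is returned to the caller. A hedged userspace sketch of the same flow, assuming a heap named "system" is exposed under /dev/dma_heap (heap names are platform-specific):

	#include <fcntl.h>
	#include <stddef.h>
	#include <sys/ioctl.h>
	#include <unistd.h>
	#include <linux/dma-heap.h>

	/* Returns a dma-buf fd on success, -1 on failure. */
	static int alloc_from_heap(const char *path, size_t len)
	{
		struct dma_heap_allocation_data data = {
			.len = len,
			.fd_flags = O_RDWR | O_CLOEXEC,	/* flags for the new dma-buf fd */
			.heap_flags = 0,
		};
		int heap_fd = open(path, O_RDWR | O_CLOEXEC);

		if (heap_fd < 0)
			return -1;
		if (ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &data) < 0) {
			close(heap_fd);
			return -1;
		}
		close(heap_fd);
		return data.fd;		/* filled in by the kernel on success */
	}

	/* usage: int buf = alloc_from_heap("/dev/dma_heap/system", 4096); */
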
/linux/fs/ubifs/
lprops.c
50 static void move_up_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in move_up_lpt_heap() argument
63 val2 = get_heap_comp_val(heap->arr[ppos], cat); in move_up_lpt_heap()
67 heap->arr[ppos]->hpos = hpos; in move_up_lpt_heap()
68 heap->arr[hpos] = heap->arr[ppos]; in move_up_lpt_heap()
69 heap->arr[ppos] = lprops; in move_up_lpt_heap()
87 static void adjust_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap, in adjust_lpt_heap() argument
97 val2 = get_heap_comp_val(heap->arr[ppos], cat); in adjust_lpt_heap()
101 heap->arr[ppos]->hpos = hpos; in adjust_lpt_heap()
102 heap->arr[hpos] = heap->arr[ppos]; in adjust_lpt_heap()
103 heap->arr[ppos] = lprops; in adjust_lpt_heap()
[all …]
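
move_up_lpt_heap() is a percolate-up with one ubifs-specific twist: every element stores its own heap index (hpos) so adjust_lpt_heap() and removal can start from the element without searching the array. A generic sketch of that pattern; demo_item and demo_move_up are hypothetical stand-ins for ubifs_lprops and the real helper:

	struct demo_item {
		int val;	/* comparison key; get_heap_comp_val() in ubifs */
		int hpos;	/* back-pointer: this item's current index in arr[] */
	};

	/* Move item toward the root of a 0-based max-heap while it beats its
	 * parent, keeping every displaced parent's hpos back-pointer current. */
	static void demo_move_up(struct demo_item **arr, struct demo_item *item, int hpos)
	{
		while (hpos) {
			int ppos = (hpos - 1) / 2;

			if (arr[ppos]->val >= item->val)
				break;
			arr[hpos] = arr[ppos];		/* parent sinks one level */
			arr[hpos]->hpos = hpos;		/* ...and its back-pointer follows */
			hpos = ppos;
		}
		arr[hpos] = item;
		item->hpos = hpos;
	}
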
find.c
45 struct ubifs_lpt_heap *heap; in valuable() local
51 heap = &c->lpt_heap[cat - 1]; in valuable()
52 if (heap->cnt < heap->max_cnt) in valuable()
130 struct ubifs_lpt_heap *heap; in scan_for_dirty() local
135 heap = &c->lpt_heap[LPROPS_FREE - 1]; in scan_for_dirty()
136 for (i = 0; i < heap->cnt; i++) { in scan_for_dirty()
137 lprops = heap->arr[i]; in scan_for_dirty()
226 struct ubifs_lpt_heap *heap, *idx_heap; in ubifs_find_dirty_leb() local
272 heap = &c->lpt_heap[LPROPS_DIRTY - 1]; in ubifs_find_dirty_leb()
292 if (heap->cnt) { in ubifs_find_dirty_leb()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
255 nvkm_mmu_type(struct nvkm_mmu *mmu, int heap, u8 type) in nvkm_mmu_type() argument
257 if (heap >= 0 && !WARN_ON(mmu->type_nr == ARRAY_SIZE(mmu->type))) { in nvkm_mmu_type()
258 mmu->type[mmu->type_nr].type = type | mmu->heap[heap].type; in nvkm_mmu_type()
259 mmu->type[mmu->type_nr].heap = heap; in nvkm_mmu_type()
268 if (!WARN_ON(mmu->heap_nr == ARRAY_SIZE(mmu->heap))) { in nvkm_mmu_heap()
269 mmu->heap[mmu->heap_nr].type = type; in nvkm_mmu_heap()
270 mmu->heap[mmu->heap_nr].size = size; in nvkm_mmu_heap()
282 int heap; in nvkm_mmu_host() local
285 heap = nvkm_mmu_heap(mmu, NVKM_MEM_HOST, ~0ULL); in nvkm_mmu_host()
286 nvkm_mmu_type(mmu, heap, type); in nvkm_mmu_host()
[all …]
/linux/tools/include/nolibc/
stdlib.h
72 struct nolibc_heap *heap; in free() local
77 heap = container_of(ptr, struct nolibc_heap, user_p); in free()
78 munmap(heap, heap->len); in free()
133 struct nolibc_heap *heap; in malloc() local
136 len = sizeof(*heap) + len; in malloc()
138 heap = mmap(NULL, len, PROT_READ|PROT_WRITE, MAP_ANONYMOUS|MAP_PRIVATE, in malloc()
140 if (__builtin_expect(heap == MAP_FAILED, 0)) in malloc()
143 heap->len = len; in malloc()
144 return heap->user_p; in malloc()
167 struct nolibc_heap *heap; in realloc() local
[all …]
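
nolibc's malloc()/free() keep no free lists at all: each allocation is its own anonymous mmap() with a small header recording the mapped length, and free() walks back from the user pointer (container_of() in nolibc) to munmap() the whole region. A standalone sketch of that layout, not nolibc itself:

	#include <stddef.h>
	#include <sys/mman.h>

	struct demo_heap {
		size_t len;			/* total mapped length, header included */
		char   user_p[] __attribute__((aligned));
	};

	static void *demo_malloc(size_t len)
	{
		struct demo_heap *heap;

		len += sizeof(*heap);		/* room for the hidden header */
		heap = mmap(NULL, len, PROT_READ | PROT_WRITE,
			    MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
		if (heap == MAP_FAILED)
			return NULL;
		heap->len = len;
		return heap->user_p;
	}

	static void demo_free(void *ptr)
	{
		struct demo_heap *heap;

		if (!ptr)
			return;
		/* step back from the user pointer to the header */
		heap = (struct demo_heap *)((char *)ptr - offsetof(struct demo_heap, user_p));
		munmap(heap, heap->len);
	}
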
/linux/drivers/gpu/drm/nouveau/include/nvkm/core/
mm.h
12 u8 heap; member
34 int nvkm_mm_init(struct nvkm_mm *, u8 heap, u32 offset, u32 length, u32 block);
36 int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
38 int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
44 nvkm_mm_heap_size(struct nvkm_mm *mm, u8 heap) in nvkm_mm_heap_size() argument
49 if (node->heap == heap) in nvkm_mm_heap_size()
/linux/drivers/gpu/drm/nouveau/nvkm/core/
mm.c
99 b->heap = a->heap; in region_head()
111 nvkm_mm_head(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_head() argument
122 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_head()
123 if (this->heap != heap) in nvkm_mm_head()
175 b->heap = a->heap; in region_tail()
186 nvkm_mm_tail(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min, in nvkm_mm_tail() argument
198 if (unlikely(heap != NVKM_MM_HEAP_ANY)) { in nvkm_mm_tail()
199 if (this->heap != heap) in nvkm_mm_tail()
240 nvkm_mm_init(struct nvkm_mm *mm, u8 heap, u32 offset, u32 length, u32 block) in nvkm_mm_init() argument
277 node->heap = heap; in nvkm_mm_init()
gpuobj.c
180 ret = nvkm_mm_head(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
183 ret = nvkm_mm_tail(&parent->heap, 0, 1, size, size, in nvkm_gpuobj_ctor()
211 return nvkm_mm_init(&gpuobj->heap, 0, 0, gpuobj->size, 1); in nvkm_gpuobj_ctor()
220 nvkm_mm_free(&gpuobj->parent->heap, &gpuobj->node); in nvkm_gpuobj_del()
221 nvkm_mm_fini(&gpuobj->heap); in nvkm_gpuobj_del()
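
Taken together, mm.h, mm.c, and gpuobj.c show the nvkm_mm lifecycle: init a range, carve nodes from its head or tail, free them, then tear the mm down. A hedged sketch of that sequence, assuming nvkm_mm_head()'s trailing parameters (align and the output node pointer) continue past the truncation above the way the gpuobj.c and instmem call sites use them:

	static int nvkm_mm_demo(u32 length, u32 size, u32 align)
	{
		struct nvkm_mm mm;
		struct nvkm_mm_node *node = NULL;
		int ret;

		/* one heap (id 0) spanning [0, length) with 1-unit block granularity */
		ret = nvkm_mm_init(&mm, 0, 0, length, 1);
		if (ret)
			return ret;

		/* carve exactly 'size' units of type 1 from the bottom of the range */
		ret = nvkm_mm_head(&mm, 0, 1, size, size, align ? align : 1, &node);
		if (ret == 0)
			nvkm_mm_free(&mm, &node);	/* returns the node, NULLs the pointer */

		nvkm_mm_fini(&mm);
		return ret;
	}
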
/linux/Documentation/ABI/testing/
sysfs-kernel-mm-cma
6 heap name (also sometimes called CMA areas).
8 Each CMA heap subdirectory (that is, each
9 /sys/kernel/mm/cma/<cma-heap-name> directory) contains the
15 What: /sys/kernel/mm/cma/<cma-heap-name>/alloc_pages_success
21 What: /sys/kernel/mm/cma/<cma-heap-name>/alloc_pages_fail
27 What: /sys/kernel/mm/cma/<cma-heap-name>/release_pages_success
/linux/drivers/gpu/drm/nouveau/nvif/
mmu.c
35 kfree(mmu->heap); in nvif_mmu_dtor()
53 mmu->heap = NULL; in nvif_mmu_ctor()
72 mmu->heap = kmalloc_array(mmu->heap_nr, sizeof(*mmu->heap), in nvif_mmu_ctor()
76 if (ret = -ENOMEM, !mmu->heap || !mmu->type) in nvif_mmu_ctor()
92 mmu->heap[i].size = args.size; in nvif_mmu_ctor()
112 mmu->type[i].heap = args.heap; in nvif_mmu_ctor()
/linux/drivers/dma-buf/heaps/
Kconfig
5 Choose this option to enable the system dmabuf heap. The system heap
12 Choose this option to enable dma-buf CMA heap. This heap is backed
cma_heap.c
27 struct dma_heap *heap; member
32 struct cma_heap *heap; member
247 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
275 static struct dma_buf *cma_heap_allocate(struct dma_heap *heap, in cma_heap_allocate() argument
280 struct cma_heap *cma_heap = dma_heap_get_drvdata(heap); in cma_heap_allocate()
339 buffer->heap = cma_heap; in cma_heap_allocate()
343 exp_info.exp_name = dma_heap_get_name(heap); in cma_heap_allocate()
383 cma_heap->heap = dma_heap_add(&exp_info); in __add_cma_heap()
384 if (IS_ERR(cma_heap->heap)) { in __add_cma_heap()
385 int ret = PTR_ERR(cma_heap->heap); in __add_cma_heap()
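
__add_cma_heap() registers the CMA heap with the dma-heap core via dma_heap_add(). A hedged sketch of the provider side, following the dma_heap_export_info pattern these results imply; the my_heap_* names and the stubbed allocate() body are illustrative:

	#include <linux/dma-buf.h>
	#include <linux/dma-heap.h>
	#include <linux/err.h>

	static struct dma_buf *my_heap_allocate(struct dma_heap *heap, unsigned long len,
						u32 fd_flags, u64 heap_flags)
	{
		/* a real heap would allocate backing memory here and export it
		 * as a dma_buf; stubbed out in this sketch */
		return ERR_PTR(-ENOMEM);
	}

	static const struct dma_heap_ops my_heap_ops = {
		.allocate = my_heap_allocate,
	};

	static int my_heap_register(void *drvdata)
	{
		struct dma_heap_export_info exp_info = {
			.name = "my-heap",	/* surfaces as /dev/dma_heap/my-heap */
			.ops  = &my_heap_ops,
			.priv = drvdata,	/* later fetched via dma_heap_get_drvdata() */
		};
		struct dma_heap *heap = dma_heap_add(&exp_info);

		return IS_ERR(heap) ? PTR_ERR(heap) : 0;
	}
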
/linux/lib/zlib_deflate/
deftree.c
298 top = s->heap[SMALLEST]; \
299 s->heap[SMALLEST] = s->heap[s->heap_len--]; \
323 int v = s->heap[k]; in pqdownheap()
328 smaller(tree, s->heap[j+1], s->heap[j], s->depth)) { in pqdownheap()
332 if (smaller(tree, v, s->heap[j], s->depth)) break; in pqdownheap()
335 s->heap[k] = s->heap[j]; k = j; in pqdownheap()
340 s->heap[k] = v; in pqdownheap()
376 tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */ in gen_bitlen()
379 n = s->heap[h]; in gen_bitlen()
420 m = s->heap[--h]; in gen_bitlen()
[all …]
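
pqdownheap() restores the heap invariant after the pqremove pattern at line 298 replaces the root with the last element. deftree.c's heap[] is 1-based (SMALLEST == 1, children of k at 2k and 2k+1); the same sift-down extracted into a self-contained form over plain ints, with the smaller() frequency comparison reduced to <:

	/* v starts at index k and sinks until both children are >= it. */
	static void demo_pqdownheap(int *heap, int heap_len, int k)
	{
		int v = heap[k];
		int j = k << 1;			/* left child of k */

		while (j <= heap_len) {
			if (j < heap_len && heap[j + 1] < heap[j])
				j++;		/* pick the smaller child */
			if (v <= heap[j])
				break;		/* v has sunk into place */
			heap[k] = heap[j];	/* smaller child moves up */
			k = j;
			j <<= 1;
		}
		heap[k] = v;
	}
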
/linux/include/linux/
dma-heap.h
23 struct dma_buf *(*allocate)(struct dma_heap *heap,
43 void *dma_heap_get_drvdata(struct dma_heap *heap);
45 const char *dma_heap_get_name(struct dma_heap *heap);
/linux/drivers/md/bcache/
bset.c
60 min_heap_init(&iter.heap, NULL, MAX_BSETS); in __bch_count_data()
75 min_heap_init(&iter.heap, NULL, MAX_BSETS); in __bch_check_keys()
117 struct bkey *k = iter->heap.data->k, *next = bkey_next(k); in bch_btree_iter_next_check()
119 if (next < iter->heap.data->end && in bch_btree_iter_next_check()
892 min_heap_init(&iter.heap, NULL, MAX_BSETS); in bch_btree_insert_key()
1098 return !iter->heap.nr; in btree_iter_end()
1110 BUG_ON(!min_heap_push(&iter->heap, in bch_btree_iter_push()
1123 iter->heap.size = ARRAY_SIZE(iter->heap.preallocated); in __bch_btree_iter_init()
1124 iter->heap.nr = 0; in __bch_btree_iter_init()
1158 ret = iter->heap.data->k; in __bch_btree_iter_next()
[all …]
util.h
34 #define init_heap(heap, _size, gfp) \ argument
37 (heap)->nr = 0; \
38 (heap)->size = (_size); \
39 _bytes = (heap)->size * sizeof(*(heap)->data); \
40 (heap)->data = kvmalloc(_bytes, (gfp) & GFP_KERNEL); \
41 (heap)->data; \
44 #define free_heap(heap) \ argument
46 kvfree((heap)->data); \
47 (heap)->data = NULL; \
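
init_heap() sizes and kvmalloc()s the backing array, evaluating to the data pointer so callers can test it for NULL, and free_heap() releases it; the heap itself is then driven with the min_heap helpers, as alloc.c below shows. A hedged usage sketch; the demo_* wrappers and nbuckets parameter are illustrative:

	static int demo_setup(struct cache *ca, size_t nbuckets)
	{
		/* init_heap() evaluates to (heap)->data, so NULL means kvmalloc() failed */
		if (!init_heap(&ca->heap, nbuckets, GFP_KERNEL))
			return -ENOMEM;
		return 0;
	}

	static void demo_teardown(struct cache *ca)
	{
		free_heap(&ca->heap);	/* kvfree()s ->data and NULLs it against reuse */
	}
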
alloc.c
204 ca->heap.nr = 0; in invalidate_buckets_lru()
210 if (!min_heap_full(&ca->heap)) in invalidate_buckets_lru()
211 min_heap_push(&ca->heap, &b, &bucket_max_cmp_callback, ca); in invalidate_buckets_lru()
212 else if (!new_bucket_max_cmp(&b, min_heap_peek(&ca->heap), ca)) { in invalidate_buckets_lru()
213 ca->heap.data[0] = b; in invalidate_buckets_lru()
214 min_heap_sift_down(&ca->heap, 0, &bucket_max_cmp_callback, ca); in invalidate_buckets_lru()
218 min_heapify_all(&ca->heap, &bucket_min_cmp_callback, ca); in invalidate_buckets_lru()
221 if (!ca->heap.nr) { in invalidate_buckets_lru()
230 b = min_heap_peek(&ca->heap)[0]; in invalidate_buckets_lru()
231 min_heap_pop(&ca->heap, &bucket_min_cmp_callback, ca); in invalidate_buckets_lru()
extents.c
36 *i = iter->heap.data[--iter->heap.nr]; in sort_key_next()
276 while (iter->heap.nr > 1) { in bch_extent_sort_fixup()
277 struct btree_iter_set *top = iter->heap.data, *i = top + 1; in bch_extent_sort_fixup()
279 if (iter->heap.nr > 2 && in bch_extent_sort_fixup()
288 min_heap_sift_down(&iter->heap, i - top, &callbacks, NULL); in bch_extent_sort_fixup()
298 min_heap_sift_down(&iter->heap, i - top, &callbacks, NULL); in bch_extent_sort_fixup()
308 min_heap_sift_down(&iter->heap, 0, &callbacks, NULL); in bch_extent_sort_fixup()
/linux/drivers/accel/amdxdna/
aie2_ctx.c
57 struct amdxdna_gem_obj *heap = hwctx->priv->heap; in aie2_hwctx_restart() local
67 heap->mem.userptr, heap->mem.size); in aie2_hwctx_restart()
521 struct amdxdna_gem_obj *heap; in aie2_hwctx_init() local
531 heap = client->dev_heap; in aie2_hwctx_init()
532 if (!heap) { in aie2_hwctx_init()
538 drm_gem_object_get(to_gobj(heap)); in aie2_hwctx_init()
540 priv->heap = heap; in aie2_hwctx_init()
543 ret = amdxdna_gem_pin(heap); in aie2_hwctx_init()
604 heap->mem.userptr, heap->mem.size); in aie2_hwctx_init()
638 amdxdna_gem_unpin(heap); in aie2_hwctx_init()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
nv40.c
32 struct nvkm_mm heap; member
103 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv40_instobj_dtor()
136 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv40_instobj_new()
179 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv40_instmem_oneinit()
221 nvkm_mm_fini(&imem->heap); in nv40_instmem_dtor()
nv04.c
32 struct nvkm_mm heap; member
104 nvkm_mm_free(&iobj->imem->heap, &iobj->node); in nv04_instobj_dtor()
137 ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node); in nv04_instobj_new()
210 ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1); in nv04_instmem_oneinit()
248 nvkm_mm_fini(&imem->heap); in nv04_instmem_dtor()
/linux/arch/x86/boot/compressed/
misc.c
421 memptr heap = (memptr)boot_heap; in extract_kernel() local
469 free_mem_ptr = heap; /* Heap */ in extract_kernel()
470 free_mem_end_ptr = heap + BOOT_HEAP_SIZE; in extract_kernel()
512 if (heap > 0x3fffffffffffUL) in extract_kernel()
517 if (heap > ((-__PAGE_OFFSET-(128<<20)-1) & 0x7fffffff)) in extract_kernel()
/linux/fs/bcachefs/
util.h
68 #define init_heap(heap, _size, gfp) \ argument
70 (heap)->nr = 0; \
71 (heap)->size = (_size); \
72 (heap)->data = kvmalloc((heap)->size * sizeof((heap)->data[0]),\
76 #define free_heap(heap) \ argument
78 kvfree((heap)->data); \
79 (heap)->data = NULL; \
