/linux/fs/ubifs/

lprops.c
     50  static void move_up_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap,  [in move_up_lpt_heap(), argument]
     63  val2 = get_heap_comp_val(heap->arr[ppos], cat);  [in move_up_lpt_heap()]
     67  heap->arr[ppos]->hpos = hpos;  [in move_up_lpt_heap()]
     68  heap->arr[hpos] = heap->arr[ppos];  [in move_up_lpt_heap()]
     69  heap->arr[ppos] = lprops;  [in move_up_lpt_heap()]
     87  static void adjust_lpt_heap(struct ubifs_info *c, struct ubifs_lpt_heap *heap,  [in adjust_lpt_heap(), argument]
     97  val2 = get_heap_comp_val(heap->arr[ppos], cat);  [in adjust_lpt_heap()]
    101  heap->arr[ppos]->hpos = hpos;  [in adjust_lpt_heap()]
    102  heap->arr[hpos] = heap->arr[ppos];  [in adjust_lpt_heap()]
    103  heap->arr[ppos] = lprops;  [in adjust_lpt_heap()]
    [all …]
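The move_up_lpt_heap() hits above are the sift-up half of UBIFS's array-backed LEB-property heaps: the entry bubbles toward the root while it compares greater than its parent, and each element caches its own slot in ->hpos. A minimal, self-contained sketch of that pattern, with hypothetical names in place of the UBIFS structures:

```c
/*
 * Hedged sketch of the sift-up seen in move_up_lpt_heap(): bubble the
 * element at 'hpos' toward the root of a max-heap while it compares
 * greater than its parent.  'val' stands in for get_heap_comp_val();
 * the names are hypothetical, not the UBIFS ones.
 */
#include <stddef.h>

struct item {
	int    val;   /* comparison key */
	size_t hpos;  /* cached position in the heap array */
};

static void sift_up(struct item **arr, size_t hpos)
{
	struct item *it = arr[hpos];

	while (hpos) {
		size_t ppos = (hpos - 1) / 2;	/* parent index */

		if (arr[ppos]->val >= it->val)
			break;			/* heap property already holds */

		arr[ppos]->hpos = hpos;		/* parent slides down */
		arr[hpos] = arr[ppos];
		arr[ppos] = it;
		hpos = ppos;
	}
	it->hpos = hpos;			/* record the final slot */
}
```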
|
find.c
     45  struct ubifs_lpt_heap *heap;  [in valuable(), local]
     51  heap = &c->lpt_heap[cat - 1];  [in valuable()]
     52  if (heap->cnt < heap->max_cnt)  [in valuable()]
    130  struct ubifs_lpt_heap *heap;  [in scan_for_dirty(), local]
    135  heap = &c->lpt_heap[LPROPS_FREE - 1];  [in scan_for_dirty()]
    136  for (i = 0; i < heap->cnt; i++) {  [in scan_for_dirty()]
    137  lprops = heap->arr[i];  [in scan_for_dirty()]
    226  struct ubifs_lpt_heap *heap, *idx_heap;  [in ubifs_find_dirty_leb(), local]
    272  heap = &c->lpt_heap[LPROPS_DIRTY - 1];  [in ubifs_find_dirty_leb()]
    292  if (heap->cnt) {  [in ubifs_find_dirty_leb()]
    [all …]
|
lpt_commit.c
    764  struct ubifs_lpt_heap *heap;  [in populate_lsave(), local]
    791  heap = &c->lpt_heap[LPROPS_DIRTY_IDX - 1];  [in populate_lsave()]
    792  for (i = 0; i < heap->cnt; i++) {  [in populate_lsave()]
    793  c->lsave[cnt++] = heap->arr[i]->lnum;  [in populate_lsave()]
    797  heap = &c->lpt_heap[LPROPS_DIRTY - 1];  [in populate_lsave()]
    798  for (i = 0; i < heap->cnt; i++) {  [in populate_lsave()]
    799  c->lsave[cnt++] = heap->arr[i]->lnum;  [in populate_lsave()]
    803  heap = &c->lpt_heap[LPROPS_FREE - 1];  [in populate_lsave()]
    804  for (i = 0; i < heap->cnt; i++) {  [in populate_lsave()]
    805  c->lsave[cnt++] = heap->arr[i]->lnum;  [in populate_lsave()]
    [all …]
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/

base.c
    255  nvkm_mmu_type(struct nvkm_mmu *mmu, int heap, u8 type)  [in nvkm_mmu_type(), argument]
    257  if (heap >= 0 && !WARN_ON(mmu->type_nr == ARRAY_SIZE(mmu->type))) {  [in nvkm_mmu_type()]
    258  mmu->type[mmu->type_nr].type = type | mmu->heap[heap].type;  [in nvkm_mmu_type()]
    259  mmu->type[mmu->type_nr].heap = heap;  [in nvkm_mmu_type()]
    268  if (!WARN_ON(mmu->heap_nr == ARRAY_SIZE(mmu->heap))) {  [in nvkm_mmu_heap()]
    269  mmu->heap[mmu->heap_nr].type = type;  [in nvkm_mmu_heap()]
    270  mmu->heap[mmu->heap_nr].size = size;  [in nvkm_mmu_heap()]
    282  int heap;  [in nvkm_mmu_host(), local]
    285  heap = nvkm_mmu_heap(mmu, NVKM_MEM_HOST, ~0ULL);  [in nvkm_mmu_host()]
    286  nvkm_mmu_type(mmu, heap, type);  [in nvkm_mmu_host()]
    [all …]
|
ummu.c
     69  args->v0.size = mmu->heap[index].size;  [in nvkm_ummu_heap()]
     90  args->v0.heap = mmu->type[index].heap;  [in nvkm_ummu_type()]
|
/linux/tools/include/nolibc/

stdlib.h
     94  struct nolibc_heap *heap;  [in free(), local]
     99  heap = container_of(ptr, struct nolibc_heap, user_p);  [in free()]
    100  munmap(heap, heap->len);  [in free()]
    131  struct nolibc_heap *heap;  [in malloc(), local]
    134  len = sizeof(*heap) + len;  [in malloc()]
    136  heap = mmap(NULL, len, PROT_READ|PROT_WRITE, MAP_ANONYMOUS|MAP_PRIVATE,  [in malloc()]
    138  if (__builtin_expect(heap == MAP_FAILED, 0))  [in malloc()]
    141  heap->len = len;  [in malloc()]
    142  return heap->user_p;  [in malloc()]
    165  struct nolibc_heap *heap;  [in realloc(), local]
    [all …]
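The stdlib.h hits show nolibc's whole allocator strategy: every allocation is its own anonymous mapping prefixed by a small header that records the mapping length, so free() can munmap() the region without any free list. A standalone sketch of the same idea (the struct and helper names here are illustrative, not nolibc's exact definitions):

```c
#include <stddef.h>
#include <sys/mman.h>

/* Illustrative header; nolibc's real struct nolibc_heap differs in detail. */
struct tiny_heap {
	size_t len;       /* total length of the mapping, header included */
	char   user_p[];  /* what the caller gets back */
};

static void *tiny_malloc(size_t len)
{
	struct tiny_heap *heap;

	len += sizeof(*heap);
	heap = mmap(NULL, len, PROT_READ | PROT_WRITE,
		    MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
	if (heap == MAP_FAILED)
		return NULL;

	heap->len = len;
	return heap->user_p;
}

static void tiny_free(void *ptr)
{
	struct tiny_heap *heap;

	if (!ptr)
		return;
	/* step back from the user pointer to the header, then unmap it all */
	heap = (struct tiny_heap *)((char *)ptr - offsetof(struct tiny_heap, user_p));
	munmap(heap, heap->len);
}
```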
|
/linux/drivers/gpu/drm/nouveau/include/nvkm/core/

mm.h
     12  u8 heap;  [member]
     34  int nvkm_mm_init(struct nvkm_mm *, u8 heap, u32 offset, u32 length, u32 block);
     36  int nvkm_mm_head(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
     38  int nvkm_mm_tail(struct nvkm_mm *, u8 heap, u8 type, u32 size_max,
     44  nvkm_mm_heap_size(struct nvkm_mm *mm, u8 heap)  [in nvkm_mm_heap_size(), argument]
     49  if (node->heap == heap)  [in nvkm_mm_heap_size()]
|
/linux/drivers/gpu/drm/nouveau/nvkm/core/

mm.c
     99  b->heap = a->heap;  [in region_head()]
    111  nvkm_mm_head(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min,  [in nvkm_mm_head(), argument]
    122  if (unlikely(heap != NVKM_MM_HEAP_ANY)) {  [in nvkm_mm_head()]
    123  if (this->heap != heap)  [in nvkm_mm_head()]
    175  b->heap = a->heap;  [in region_tail()]
    186  nvkm_mm_tail(struct nvkm_mm *mm, u8 heap, u8 type, u32 size_max, u32 size_min,  [in nvkm_mm_tail(), argument]
    198  if (unlikely(heap != NVKM_MM_HEAP_ANY)) {  [in nvkm_mm_tail()]
    199  if (this->heap != heap)  [in nvkm_mm_tail()]
    240  nvkm_mm_init(struct nvkm_mm *mm, u8 heap, u32 offset, u32 length, u32 block)  [in nvkm_mm_init(), argument]
    277  node->heap = heap;  [in nvkm_mm_init()]
|
gpuobj.c
    180  ret = nvkm_mm_head(&parent->heap, 0, 1, size, size,  [in nvkm_gpuobj_ctor()]
    183  ret = nvkm_mm_tail(&parent->heap, 0, 1, size, size,  [in nvkm_gpuobj_ctor()]
    211  return nvkm_mm_init(&gpuobj->heap, 0, 0, gpuobj->size, 1);  [in nvkm_gpuobj_ctor()]
    220  nvkm_mm_free(&gpuobj->parent->heap, &gpuobj->node);  [in nvkm_gpuobj_del()]
    221  nvkm_mm_fini(&gpuobj->heap);  [in nvkm_gpuobj_del()]
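mm.h, mm.c and gpuobj.c together show the whole nvkm_mm lifecycle: nvkm_mm_init() seeds a heap over one range, nvkm_mm_head()/nvkm_mm_tail() carve nodes from the front or back of it, and nvkm_mm_free()/nvkm_mm_fini() return the space. A hedged usage sketch of that sequence; the wrapper function and its error handling are hypothetical, while heap id 0 and type 1 follow the calls visible in gpuobj.c and nv04.c:

```c
/*
 * Hedged sketch of the nvkm_mm lifecycle seen above; not taken from any
 * one driver.  Assumes the nvkm <core/mm.h> declarations listed in the
 * mm.h entry; example_suballoc() itself is hypothetical.
 */
static int example_suballoc(struct nvkm_mm *mm, u32 total, u32 size, u32 align,
			    struct nvkm_mm_node **pnode)
{
	int ret;

	/* one heap (id 0) covering [0, total), smallest allocation unit = 1 */
	ret = nvkm_mm_init(mm, 0, 0, total, 1);
	if (ret)
		return ret;

	/* carve 'size' units from the front of heap 0, type 1 */
	ret = nvkm_mm_head(mm, 0, 1, size, size, align ? align : 1, pnode);
	if (ret) {
		nvkm_mm_fini(mm);
		return ret;
	}

	/* ... use the node; later, release it and tear the heap down */
	nvkm_mm_free(mm, pnode);
	nvkm_mm_fini(mm);
	return 0;
}
```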
|
/linux/drivers/gpu/drm/nouveau/nvif/

mmu.c
     35  kfree(mmu->heap);  [in nvif_mmu_dtor()]
     53  mmu->heap = NULL;  [in nvif_mmu_ctor()]
     72  mmu->heap = kmalloc_array(mmu->heap_nr, sizeof(*mmu->heap),  [in nvif_mmu_ctor()]
     76  if (ret = -ENOMEM, !mmu->heap || !mmu->type)  [in nvif_mmu_ctor()]
     92  mmu->heap[i].size = args.size;  [in nvif_mmu_ctor()]
    112  mmu->type[i].heap = args.heap;  [in nvif_mmu_ctor()]
|
/linux/lib/zlib_deflate/

deftree.c
    298  top = s->heap[SMALLEST]; \
    299  s->heap[SMALLEST] = s->heap[s->heap_len--]; \
    323  int v = s->heap[k];  [in pqdownheap()]
    328  smaller(tree, s->heap[j+1], s->heap[j], s->depth)) {  [in pqdownheap()]
    332  if (smaller(tree, v, s->heap[j], s->depth)) break;  [in pqdownheap()]
    335  s->heap[k] = s->heap[j]; k = j;  [in pqdownheap()]
    340  s->heap[k] = v;  [in pqdownheap()]
    376  tree[s->heap[s->heap_max]].Len = 0; /* root of the heap */  [in gen_bitlen()]
    379  n = s->heap[h];  [in gen_bitlen()]
    420  m = s->heap[--h];  [in gen_bitlen()]
    [all …]
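pqdownheap() in the hits above is the classic sift-down used after pqremove() pulls s->heap[SMALLEST] and promotes the last element to the root. A generic sift-down over a 1-based int min-heap, with hypothetical names, for comparison; zlib's version compares tree frequencies via smaller() and breaks ties on node depth:

```c
/*
 * Generic sift-down sketch of what pqdownheap() does: push the value at
 * index k down until neither child is smaller.  The heap is a 1-based
 * min-heap of ints; names are hypothetical, not zlib's.
 */
static void sift_down(int *heap, int heap_len, int k)
{
	int v = heap[k];
	int j = 2 * k;				/* left child of k (1-based) */

	while (j <= heap_len) {
		/* pick the smaller of the two children */
		if (j < heap_len && heap[j + 1] < heap[j])
			j++;
		if (v <= heap[j])
			break;			/* v is already in place */
		heap[k] = heap[j];		/* smaller child moves up */
		k = j;
		j = 2 * k;
	}
	heap[k] = v;
}
```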
|
/linux/include/linux/

dma-heap.h
     23  struct dma_buf *(*allocate)(struct dma_heap *heap,
     43  void *dma_heap_get_drvdata(struct dma_heap *heap);
     45  const char *dma_heap_get_name(struct dma_heap *heap);
|
/linux/tools/perf/util/

powerpc-vpadtl.c
     26  struct auxtrace_heap heap;  [member]
    380  if (!vpa->heap.heap_cnt)  [in powerpc_vpadtl_process_queues()]
    383  if (vpa->heap.heap_array[0].ordinal >= timestamp)  [in powerpc_vpadtl_process_queues()]
    386  queue_nr = vpa->heap.heap_array[0].queue_nr;  [in powerpc_vpadtl_process_queues()]
    390  auxtrace_heap__pop(&vpa->heap);  [in powerpc_vpadtl_process_queues()]
    392  if (vpa->heap.heap_cnt) {  [in powerpc_vpadtl_process_queues()]
    393  ts = vpa->heap.heap_array[0].ordinal + 1;  [in powerpc_vpadtl_process_queues()]
    402  auxtrace_heap__add(&vpa->heap, queue_nr, ts);  [in powerpc_vpadtl_process_queues()]
    407  ret = auxtrace_heap__add(&vpa->heap, queue_nr, ts);  [in powerpc_vpadtl_process_queues()]
    477  ret = auxtrace_heap__add(&vpa->heap, queue_nr, vpaq->timestamp);  [in powerpc_vpadtl__setup_queue()]
    [all …]
|
s390-cpumsf.c
    171  struct auxtrace_heap heap;  [member]
    819  return auxtrace_heap__add(&sf->heap, queue_nr, ts);  [in s390_cpumsf_setup_queue()]
    855  if (!sf->heap.heap_cnt)  [in s390_cpumsf_process_queues()]
    858  if (sf->heap.heap_array[0].ordinal >= timestamp)  [in s390_cpumsf_process_queues()]
    861  queue_nr = sf->heap.heap_array[0].queue_nr;  [in s390_cpumsf_process_queues()]
    865  auxtrace_heap__pop(&sf->heap);  [in s390_cpumsf_process_queues()]
    866  if (sf->heap.heap_cnt) {  [in s390_cpumsf_process_queues()]
    867  ts = sf->heap.heap_array[0].ordinal + 1;  [in s390_cpumsf_process_queues()]
    876  auxtrace_heap__add(&sf->heap, queue_nr, ts);  [in s390_cpumsf_process_queues()]
    880  ret = auxtrace_heap__add(&sf->heap, queue_nr, ts);  [in s390_cpumsf_process_queues()]
    [all …]
|
intel-bts.c
     47  struct auxtrace_heap heap;  [member]
    196  ret = auxtrace_heap__add(&bts->heap, queue_nr,  [in intel_bts_setup_queue()]
    563  if (!bts->heap.heap_cnt)  [in intel_bts_process_queues()]
    566  if (bts->heap.heap_array[0].ordinal > timestamp)  [in intel_bts_process_queues()]
    569  queue_nr = bts->heap.heap_array[0].queue_nr;  [in intel_bts_process_queues()]
    573  auxtrace_heap__pop(&bts->heap);  [in intel_bts_process_queues()]
    577  auxtrace_heap__add(&bts->heap, queue_nr, ts);  [in intel_bts_process_queues()]
    582  ret = auxtrace_heap__add(&bts->heap, queue_nr, ts);  [in intel_bts_process_queues()]
    727  auxtrace_heap__free(&bts->heap);  [in intel_bts_free()]
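powerpc-vpadtl.c, s390-cpumsf.c and intel-bts.c above all drive their decode queues from the same auxtrace min-heap: peek the queue with the smallest ordinal (its next timestamp), pop it, process it, then re-add it at its next timestamp. A hedged sketch of that shared loop; process_queue() is a hypothetical stand-in for the per-decoder work, and the real decoders differ in detail (for example, how they pick the processing bound):

```c
/*
 * Hedged sketch of the scheduling loop shared by the decoders above.
 * auxtrace_heap__add/pop come from tools/perf/util/auxtrace.h;
 * process_queue() is hypothetical and returns the queue's next pending
 * timestamp through *next_ts.
 */
static int process_queues_upto(struct auxtrace_heap *heap, u64 timestamp)
{
	while (heap->heap_cnt) {
		unsigned int queue_nr = heap->heap_array[0].queue_nr;
		u64 ts = heap->heap_array[0].ordinal;
		int ret;

		if (ts >= timestamp)
			break;			/* nothing due before the cutoff */

		auxtrace_heap__pop(heap);	/* detach the most overdue queue */

		ret = process_queue(queue_nr, timestamp, &ts);
		if (ret) {
			/* put it back so a later call can retry from here */
			auxtrace_heap__add(heap, queue_nr, ts);
			return ret;
		}

		/* reschedule the queue at its next pending timestamp */
		ret = auxtrace_heap__add(heap, queue_nr, ts);
		if (ret)
			return ret;
	}
	return 0;
}
```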
|
/linux/drivers/md/bcache/

util.h
     39  #define init_heap(heap, _size, gfp) \  [argument]
     42  (heap)->used = 0; \
     43  (heap)->size = (_size); \
     44  _bytes = (heap)->size * sizeof(*(heap)->data); \
     45  (heap)->data = kvmalloc(_bytes, (gfp) & GFP_KERNEL); \
     46  (heap)->data; \
     49  #define free_heap(heap) \  [argument]
     51  kvfree((heap)->data); \
     52  (heap)->data = NULL; \
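init_heap() above computes the array size from the element type, allocates it with kvmalloc(), and evaluates to (heap)->data so the caller can test for failure directly; free_heap() releases the array and clears the pointer. A hedged usage sketch, assuming a caller-defined struct with the used/size/data members the macros dereference (bcache normally declares that shape via its own DECLARE_HEAP-style helper):

```c
/*
 * Hedged usage sketch for the bcache macros above; the struct is a
 * hypothetical example of the layout the macros expect.
 */
struct example_heap {
	size_t   used;   /* elements currently in the heap */
	size_t   size;   /* capacity, in elements */
	unsigned *data;  /* kvmalloc'd backing array */
};

static int example_init(struct example_heap *h)
{
	/* init_heap() evaluates to h->data, so NULL signals allocation failure */
	if (!init_heap(h, 128, GFP_KERNEL))
		return -ENOMEM;

	/* ... push/pop entries via the heap helpers in util.h ... */

	free_heap(h);
	return 0;
}
```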
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/

nv04.c
     32  struct nvkm_mm heap;  [member]
    104  nvkm_mm_free(&iobj->imem->heap, &iobj->node);  [in nv04_instobj_dtor()]
    137  ret = nvkm_mm_head(&imem->heap, 0, 1, size, size, align ? align : 1, &iobj->node);  [in nv04_instobj_new()]
    210  ret = nvkm_mm_init(&imem->heap, 0, 0, imem->base.reserved, 1);  [in nv04_instmem_oneinit()]
    248  nvkm_mm_fini(&imem->heap);  [in nv04_instmem_dtor()]
|
/linux/arch/x86/boot/compressed/

misc.c
    410  memptr heap = (memptr)boot_heap;  [in extract_kernel(), local]
    458  free_mem_ptr = heap; /* Heap */  [in extract_kernel()]
    459  free_mem_end_ptr = heap + BOOT_HEAP_SIZE;  [in extract_kernel()]
    501  if (heap > 0x3fffffffffffUL)  [in extract_kernel()]
    506  if (heap > ((-__PAGE_OFFSET-(128<<20)-1) & 0x7fffffff))  [in extract_kernel()]
|
/linux/arch/mips/boot/compressed/

head.S
     32  PTR_LA a0, (.heap) /* heap address */
     51  .comm .heap,BOOT_HEAP_SIZE,4
|
/linux/drivers/gpu/drm/nouveau/include/nvif/

mmu.h
     16  } *heap;  [member]
     28  u8 heap;  [member]
|
/linux/drivers/dma-buf/heaps/

system_heap.c
     25  struct dma_heap *heap;  [member]
    338  static struct dma_buf *system_heap_allocate(struct dma_heap *heap,  [in system_heap_allocate(), argument]
    360  buffer->heap = heap;  [in system_heap_allocate()]
    397  exp_info.exp_name = dma_heap_get_name(heap);  [in system_heap_allocate()]
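system_heap.c implements the in-kernel provider behind /dev/dma_heap/system; user space reaches it through the dma-heap character device and the DMA_HEAP_IOCTL_ALLOC ioctl, which is also what the dmabuf-heaps selftests below exercise. A hedged user-space sketch of that allocation path, with error handling trimmed:

```c
/*
 * Hedged user-space sketch: allocate a buffer from the "system" dma-buf
 * heap via the uapi ioctl and hand back the resulting dma-buf fd.
 */
#include <fcntl.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/dma-heap.h>

int alloc_from_system_heap(size_t len)
{
	struct dma_heap_allocation_data data;
	int heap_fd, ret;

	heap_fd = open("/dev/dma_heap/system", O_RDONLY | O_CLOEXEC);
	if (heap_fd < 0)
		return -1;

	memset(&data, 0, sizeof(data));
	data.len = len;
	data.fd_flags = O_RDWR | O_CLOEXEC;	/* flags for the new dma-buf fd */

	ret = ioctl(heap_fd, DMA_HEAP_IOCTL_ALLOC, &data);
	close(heap_fd);
	if (ret < 0)
		return -1;

	return data.fd;		/* dma-buf fd backed by the system heap */
}
```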
|
/linux/drivers/gpu/drm/lima/

lima_gp.c
    148  task->heap = bo;  [in lima_gp_task_run()]
    253  if (fail_size == task->heap->heap_size) {  [in lima_gp_task_recover()]
    256  ret = lima_heap_alloc(task->heap, task->vm);  [in lima_gp_task_recover()]
    266  f[LIMA_GP_PLBU_ALLOC_START_ADDR >> 2] + task->heap->heap_size;  [in lima_gp_task_recover()]
|
/linux/kernel/configs/

hardening.config
     54  # Sampling-based heap out-of-bounds and use-after-free detection.
     60  # Initialize all heap variables to zero on allocation.
     63  # Initialize all heap variables to zero on free to reduce stale data lifetime.
|
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/

ram.c
    148  nvkm_ram_get(struct nvkm_device *device, u8 heap, u8 type, u8 rpage, u64 size,  [in nvkm_ram_get(), argument]
    177  ret = nvkm_mm_tail(mm, heap, type, max, min, align, &r);  [in nvkm_ram_get()]
    179  ret = nvkm_mm_head(mm, heap, type, max, min, align, &r);  [in nvkm_ram_get()]
|
/linux/tools/testing/selftests/dmabuf-heaps/

.gitignore
      1  dmabuf-heap
|