Lines matching refs:addr in sys/vm/vm_kern.c (FreeBSD kernel)

156 vm_offset_t addr; in kva_alloc() local
161 VMEM_ADDR_MAX, M_BESTFIT | M_NOWAIT, &addr)) in kva_alloc()
165 return (addr); in kva_alloc()
177 vm_offset_t addr; in kva_alloc_aligned() local
182 VMEM_ADDR_MAX, M_BESTFIT | M_NOWAIT, &addr)) in kva_alloc_aligned()
186 return (addr); in kva_alloc_aligned()
199 kva_free(vm_offset_t addr, vm_size_t size) in kva_free() argument
203 vmem_xfree(kernel_arena, addr, size); in kva_free()
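The hits above come from kva_alloc(), kva_alloc_aligned() and kva_free(), which only reserve and release kernel virtual address space from kernel_arena; no physical pages are attached. A minimal usage sketch, assuming a FreeBSD kernel compilation environment; map_pages_tmp(), unmap_pages_tmp() and the pages[]/npages inputs are hypothetical names for illustration:

#include <sys/param.h>
#include <vm/vm.h>
#include <vm/vm_param.h>
#include <vm/vm_extern.h>       /* kva_alloc(), kva_free() */
#include <vm/vm_page.h>
#include <vm/pmap.h>            /* pmap_qenter(), pmap_qremove() */

/*
 * Reserve a KVA window, temporarily map caller-supplied pages into it,
 * and later tear the mapping and the reservation down again.
 */
static void *
map_pages_tmp(vm_page_t *pages, int npages)
{
        vm_offset_t va;

        va = kva_alloc(ptoa(npages));   /* address space only */
        if (va == 0)
                return (NULL);
        pmap_qenter(va, pages, npages); /* install the page mappings */
        return ((void *)va);
}

static void
unmap_pages_tmp(void *p, int npages)
{
        pmap_qremove((vm_offset_t)p, npages);
        kva_free((vm_offset_t)p, ptoa(npages));
}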
211 kmem_alloc_san(vm_offset_t addr, vm_size_t size, vm_size_t asize, int flags) in kmem_alloc_san() argument
214 kmsan_mark((void *)addr, asize, KMSAN_STATE_UNINIT); in kmem_alloc_san()
215 kmsan_orig((void *)addr, asize, KMSAN_TYPE_KMEM, in kmem_alloc_san()
218 kmsan_mark((void *)addr, asize, KMSAN_STATE_INITED); in kmem_alloc_san()
220 kasan_mark((void *)addr, size, asize, KASAN_KMEM_REDZONE); in kmem_alloc_san()
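kmem_alloc_san() is the sanitizer hook the kmem_* allocators run after a range has been mapped: with KMSAN it marks the region uninitialized (or initialized when M_ZERO was requested), and with KASAN it poisons the slack between the requested size and the page-rounded allocation size as a redzone. A small sketch of that size/asize relationship; mark_tail_redzone() and the example numbers are illustrative only:

#include <sys/param.h>
#include <sys/asan.h>           /* kasan_mark(); a no-op unless KASAN is enabled */
#include <vm/vm.h>
#include <vm/vm_param.h>        /* round_page() */

/*
 * With 4 KiB pages, a request of 3000 bytes is backed by asize =
 * round_page(3000) = 4096 bytes; the 1096-byte tail is poisoned so a
 * stray access into the rounded-up slack traps under KASAN.
 */
static void
mark_tail_redzone(void *addr, vm_size_t size)
{
        vm_size_t asize = round_page(size);

        /* [addr, addr+size) stays valid, [addr+size, addr+asize) becomes a redzone. */
        kasan_mark(addr, size, asize, KASAN_KMEM_REDZONE);
}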
267 vm_offset_t addr, i, offset; in kmem_alloc_attr_domain() local
276 if (vmem_alloc(vmem, asize, M_BESTFIT | flags, &addr)) in kmem_alloc_attr_domain()
278 offset = addr - VM_MIN_KERNEL_ADDRESS; in kmem_alloc_attr_domain()
287 kmem_unback(object, addr, i); in kmem_alloc_attr_domain()
288 vmem_free(vmem, addr, asize); in kmem_alloc_attr_domain()
297 pmap_enter(kernel_pmap, addr + i, m, prot, in kmem_alloc_attr_domain()
301 kmem_alloc_san(addr, size, asize, flags); in kmem_alloc_attr_domain()
302 return ((void *)addr); in kmem_alloc_attr_domain()
320 void *addr; in kmem_alloc_attr_domainset() local
328 addr = kmem_alloc_attr_domain(domain, size, flags, low, high, in kmem_alloc_attr_domainset()
330 if (addr != NULL) in kmem_alloc_attr_domainset()
340 return (addr); in kmem_alloc_attr_domainset()
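kmem_alloc_attr_domain() backs freshly reserved KVA with pages that satisfy physical-address and memory-attribute constraints, and kmem_alloc_attr_domainset() retries it across the domains of a NUMA policy. Callers normally go through the kmem_alloc_attr() front end; a sketch, where alloc_low_uncached()/free_low_uncached() and the below-4 GiB, uncacheable constraints are made up for illustration (VM_MEMATTR_UNCACHEABLE is machine-dependent):

#include <sys/param.h>
#include <sys/malloc.h>         /* M_WAITOK, M_ZERO */
#include <vm/vm.h>
#include <vm/vm_extern.h>       /* kmem_alloc_attr(), kmem_free() */

/* 64 KiB of wired, zeroed memory whose pages all lie below 4 GiB, mapped uncacheable. */
static void *
alloc_low_uncached(void)
{
        return (kmem_alloc_attr(64 * 1024, M_WAITOK | M_ZERO,
            0, (vm_paddr_t)1 << 32, VM_MEMATTR_UNCACHEABLE));
}

static void
free_low_uncached(void *p)
{
        kmem_free(p, 64 * 1024);
}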
358 vm_offset_t addr, offset, tmp; in kmem_alloc_contig_domain() local
367 if (vmem_alloc(vmem, asize, flags | M_BESTFIT, &addr)) in kmem_alloc_contig_domain()
369 offset = addr - VM_MIN_KERNEL_ADDRESS; in kmem_alloc_contig_domain()
377 vmem_free(vmem, addr, asize); in kmem_alloc_contig_domain()
384 tmp = addr; in kmem_alloc_contig_domain()
394 kmem_alloc_san(addr, size, asize, flags); in kmem_alloc_contig_domain()
395 return ((void *)addr); in kmem_alloc_contig_domain()
414 void *addr; in kmem_alloc_contig_domainset() local
422 addr = kmem_alloc_contig_domain(domain, size, flags, low, high, in kmem_alloc_contig_domainset()
424 if (addr != NULL) in kmem_alloc_contig_domainset()
434 return (addr); in kmem_alloc_contig_domainset()
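kmem_alloc_contig_domain() adds the guarantee that the backing pages are physically contiguous and honor alignment and boundary constraints, which is the variant DMA-style consumers want. A sketch against the kmem_alloc_contig() wrapper; alloc_dma_ring() and the particular constraint values are illustrative assumptions:

#include <sys/param.h>
#include <sys/malloc.h>
#include <vm/vm.h>
#include <vm/vm_extern.h>       /* kmem_alloc_contig() */

static void *
alloc_dma_ring(vm_size_t size)
{
        /*
         * Physically contiguous pages anywhere in RAM, page-aligned,
         * not crossing a 4 GiB boundary, default cache attribute.
         */
        return (kmem_alloc_contig(size, M_WAITOK | M_ZERO,
            0, ~(vm_paddr_t)0, PAGE_SIZE, (vm_paddr_t)1 << 32,
            VM_MEMATTR_DEFAULT));
}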
479 vm_offset_t addr; in kmem_malloc_domain() local
490 if (vmem_alloc(arena, asize, flags | M_BESTFIT, &addr)) in kmem_malloc_domain()
493 rv = kmem_back_domain(domain, kernel_object, addr, asize, flags); in kmem_malloc_domain()
495 vmem_free(arena, addr, asize); in kmem_malloc_domain()
498 kasan_mark((void *)addr, size, asize, KASAN_KMEM_REDZONE); in kmem_malloc_domain()
499 return ((void *)addr); in kmem_malloc_domain()
517 void *addr; in kmem_malloc_domainset() local
522 addr = kmem_malloc_domain(domain, size, flags); in kmem_malloc_domainset()
523 if (addr != NULL) in kmem_malloc_domainset()
527 return (addr); in kmem_malloc_domainset()
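kmem_malloc_domain() is the page-granularity path behind malloc(9) and UMA for large requests: it takes KVA from a per-domain kernel arena and then backs it with kmem_back_domain(). External callers use kmem_malloc()/kmem_free(); a sketch, in which grab_pages()/grab_pages_domain0()/drop_pages() are hypothetical and DOMAINSET_PREF(0) merely illustrates one NUMA policy:

#include <sys/param.h>
#include <sys/malloc.h>         /* M_WAITOK, M_ZERO */
#include <sys/domainset.h>      /* DOMAINSET_PREF() */
#include <vm/vm.h>
#include <vm/vm_extern.h>

static void *
grab_pages(vm_size_t size)
{
        /* Wired kernel memory; the allocator rounds size up to whole pages. */
        return (kmem_malloc(size, M_WAITOK | M_ZERO));
}

static void *
grab_pages_domain0(vm_size_t size)
{
        /* Same, but prefer backing pages from NUMA domain 0. */
        return (kmem_malloc_domainset(DOMAINSET_PREF(0), size, M_WAITOK));
}

static void
drop_pages(void *p, vm_size_t size)
{
        kmem_free(p, size);     /* size must match the original request */
}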
537 kmem_back_domain(int domain, vm_object_t object, vm_offset_t addr, in kmem_back_domain() argument
549 offset = addr - VM_MIN_KERNEL_ADDRESS; in kmem_back_domain()
573 kmem_unback(object, addr, i); in kmem_back_domain()
584 pmap_enter(kernel_pmap, addr + i, m, prot, in kmem_back_domain()
590 kmem_alloc_san(addr, size, size, flags); in kmem_back_domain()
600 kmem_back(vm_object_t object, vm_offset_t addr, vm_size_t size, int flags) in kmem_back() argument
608 for (start = addr, end = addr + size; addr < end; addr = next) { in kmem_back()
614 domain = (addr >> KVA_QUANTUM_SHIFT) % vm_ndomains; in kmem_back()
617 next = roundup2(addr + 1, KVA_QUANTUM); in kmem_back()
624 rv = kmem_back_domain(domain, object, addr, next - addr, flags); in kmem_back()
626 kmem_unback(object, start, addr - start); in kmem_back()
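kmem_back() walks a large KVA range in KVA_QUANTUM-sized stripes and derives the backing NUMA domain of each stripe from its address, so successive quanta round-robin across domains; on failure it unbacks everything populated so far. A standalone illustration of that arithmetic, where the 4 MiB quantum and two domains are assumed values (the real constants are machine- and boot-time dependent):

/* Hypothetical stand-ins for KVA_QUANTUM_SHIFT and vm_ndomains. */
#define EX_QUANTUM_SHIFT        22              /* 4 MiB stripes */
#define EX_QUANTUM              (1UL << EX_QUANTUM_SHIFT)
#define EX_NDOMAINS             2

/* Domain that backs the stripe containing addr. */
static inline int
stripe_domain(unsigned long addr)
{
        return ((int)((addr >> EX_QUANTUM_SHIFT) % EX_NDOMAINS));
}

/* First address of the next stripe, i.e. roundup2(addr + 1, EX_QUANTUM). */
static inline unsigned long
stripe_end(unsigned long addr)
{
        return ((addr + EX_QUANTUM) & ~(EX_QUANTUM - 1));
}

/*
 * Example: addr = 0x00400000 sits in stripe 1, so stripe_domain() is 1
 * and stripe_end() is 0x00800000; the stripe that starts there maps to
 * domain 0 again, and so on.
 */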
643 _kmem_unback(vm_object_t object, vm_offset_t addr, vm_size_t size) in _kmem_unback() argument
656 pmap_remove(kernel_pmap, addr, addr + size); in _kmem_unback()
657 offset = addr - VM_MIN_KERNEL_ADDRESS; in _kmem_unback()
679 kmem_unback(vm_object_t object, vm_offset_t addr, vm_size_t size) in kmem_unback() argument
682 (void)_kmem_unback(object, addr, size); in kmem_unback()
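kmem_back()/kmem_unback() attach and detach wired pages (always belonging to kernel_object) for a KVA range that was reserved separately, so a caller can populate and depopulate a region without giving up its addresses. A hedged sketch of the pairing with kva_alloc(); alloc_backed_region()/free_backed_region() are hypothetical helpers:

#include <sys/param.h>
#include <sys/malloc.h>         /* M_WAITOK, M_ZERO */
#include <vm/vm.h>
#include <vm/vm_param.h>        /* KERN_SUCCESS */
#include <vm/vm_extern.h>       /* kva_alloc(), kmem_back(), kmem_unback() */
#include <vm/vm_kern.h>         /* kernel_object */

static void *
alloc_backed_region(vm_size_t size)
{
        vm_offset_t va;

        va = kva_alloc(size);                   /* address space only */
        if (va == 0)
                return (NULL);
        if (kmem_back(kernel_object, va, size,
            M_WAITOK | M_ZERO) != KERN_SUCCESS) {
                kva_free(va, size);
                return (NULL);
        }
        return ((void *)va);
}

static void
free_backed_region(void *p, vm_size_t size)
{
        kmem_unback(kernel_object, (vm_offset_t)p, size);
        kva_free((vm_offset_t)p, size);
}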
692 kmem_free(void *addr, vm_size_t size) in kmem_free() argument
697 kasan_mark(addr, size, size, 0); in kmem_free()
698 arena = _kmem_unback(kernel_object, (uintptr_t)addr, size); in kmem_free()
700 vmem_free(arena, (uintptr_t)addr, size); in kmem_free()
714 vm_offset_t addr; in kmap_alloc_wait() local
726 addr = vm_map_findspace(map, vm_map_min(map), size); in kmap_alloc_wait()
727 if (addr + size <= vm_map_max(map)) in kmap_alloc_wait()
738 vm_map_insert(map, NULL, 0, addr, addr + size, VM_PROT_RW, VM_PROT_RW, in kmap_alloc_wait()
741 return (addr); in kmap_alloc_wait()
751 kmap_free_wakeup(vm_map_t map, vm_offset_t addr, vm_size_t size) in kmap_free_wakeup() argument
755 (void) vm_map_delete(map, trunc_page(addr), round_page(addr + size)); in kmap_free_wakeup()
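kmap_alloc_wait() and kmap_free_wakeup() serve special pageable submaps (historically the exec and pipe submaps): the allocating side sleeps on the map until enough contiguous space appears, and the freeing side deletes the range and wakes those sleepers. A sketch; my_submap, submap_get() and submap_put() are hypothetical names standing in for a real submap such as exec_map:

#include <sys/param.h>
#include <vm/vm.h>
#include <vm/vm_extern.h>       /* kmap_alloc_wait(), kmap_free_wakeup() */

extern vm_map_t my_submap;      /* hypothetical pageable submap */

static vm_offset_t
submap_get(vm_size_t size)
{
        /* May sleep until "size" bytes of the submap become available. */
        return (kmap_alloc_wait(my_submap, size));
}

static void
submap_put(vm_offset_t addr, vm_size_t size)
{
        /* Releases the range and wakes threads blocked in kmap_alloc_wait(). */
        kmap_free_wakeup(my_submap, addr, size);
}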
766 vm_offset_t addr, i; in kmem_init_zero_region() local
774 addr = kva_alloc(ZERO_REGION_SIZE); in kmem_init_zero_region()
778 pmap_qenter(addr + i, &m, 1); in kmem_init_zero_region()
779 pmap_protect(kernel_pmap, addr, addr + ZERO_REGION_SIZE, VM_PROT_READ); in kmem_init_zero_region()
781 zero_region = (const void *)addr; in kmem_init_zero_region()
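kmem_init_zero_region() maps one zero-filled page repeatedly across a ZERO_REGION_SIZE window, write-protects it, and publishes it as zero_region, so code that needs long runs of zero bytes can read from a shared read-only window instead of keeping its own buffer. A sketch of a consumer; fill_zero() is a hypothetical helper and ZERO_REGION_SIZE comes from the machine-dependent vmparam headers:

#include <sys/param.h>          /* MIN() */
#include <sys/systm.h>          /* memcpy() */
#include <vm/vm.h>

extern const void *zero_region; /* set up by kmem_init_zero_region() */

/* Zero "len" bytes at "dst" by copying from the shared read-only window. */
static void
fill_zero(char *dst, size_t len)
{
        size_t chunk;

        while (len > 0) {
                chunk = MIN(len, ZERO_REGION_SIZE);
                memcpy(dst, zero_region, chunk);
                dst += chunk;
                len -= chunk;
        }
}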
790 vm_offset_t addr; in kva_import() local
797 addr = vm_map_min(kernel_map); in kva_import()
798 result = vm_map_find(kernel_map, NULL, 0, &addr, size, 0, in kva_import()
805 *addrp = addr; in kva_import()
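kva_import() is the import callback that refills kernel_arena from kernel_map in KVA_QUANTUM-aligned chunks whenever a vmem allocation finds the arena empty. A hedged sketch of how such a hook plugs into a vmem arena via vmem_set_import(); the arena, example_import() and the 4 MiB quantum are assumptions for illustration, not the kernel's actual setup code:

#include <sys/param.h>
#include <sys/vmem.h>           /* vmem_t, vmem_set_import() */

/*
 * Import callback with the vmem_import_t shape: given the opaque
 * argument, a size and allocation flags, return the base of a freshly
 * imported span through *addrp (0 on success, an errno otherwise).
 */
static int
example_import(void *arg, vmem_size_t size, int flags, vmem_addr_t *addrp)
{
        /* ...obtain "size" bytes of address space here... */
        *addrp = 0;             /* placeholder for the sketch */
        return (0);
}

static void
example_setup(vmem_t *arena)
{
        /* Refill "arena" in 4 MiB chunks through example_import(). */
        vmem_set_import(arena, example_import, NULL, NULL, 4UL * 1024 * 1024);
}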