/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_ttm.c
     78  return ttm_range_man_init(&adev->mman.bdev, type,  in amdgpu_ttm_init_on_chip()
    130  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_evict_flags()
    193  BUG_ON(adev->mman.buffer_funcs->copy_max_bytes <  in amdgpu_ttm_map_buffer()
    223  num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8);  in amdgpu_ttm_map_buffer()
    226  r = amdgpu_job_alloc_with_ib(adev, &adev->mman.high_pr,  in amdgpu_ttm_map_buffer()
    294  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in amdgpu_ttm_copy_mem_to_mem()
    301  if (!adev->mman.buffer_funcs_enabled) {  in amdgpu_ttm_copy_mem_to_mem()
    309  mutex_lock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    364  mutex_unlock(&adev->mman.gtt_window_lock);  in amdgpu_ttm_copy_mem_to_mem()
    559  if (adev->mman.buffer_funcs_enabled &&  in amdgpu_bo_move()
    [all …]

amdgpu_gtt_mgr.c
     51  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);  in amdgpu_mem_info_gtt_total_show()
     69  struct ttm_resource_manager *man = &adev->mman.gtt_mgr.manager;  in amdgpu_mem_info_gtt_used_show()
    196  adev = container_of(mgr, typeof(*adev), mman.gtt_mgr);  in amdgpu_gtt_mgr_recover()
    277  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_init()
    284  ttm_resource_manager_init(man, &adev->mman.bdev, gtt_size);  in amdgpu_gtt_mgr_init()
    291  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, &mgr->manager);  in amdgpu_gtt_mgr_init()
    306  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;  in amdgpu_gtt_mgr_fini()
    312  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);  in amdgpu_gtt_mgr_fini()
    321  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, NULL);  in amdgpu_gtt_mgr_fini()

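The amdgpu_gtt_mgr.c hits above trace the usual TTM manager lifecycle: initialize the embedded manager, register it for the TTM_PL_TT placement, then evict everything and unregister on teardown. A minimal sketch of that pattern follows, assuming the current ttm_resource_manager_* / ttm_set_driver_manager interfaces; the struct and function names outside the ttm_* calls are illustrative, and the allocation hooks (manager->func) that a real driver must install are omitted.

#include <drm/ttm/ttm_device.h>
#include <drm/ttm/ttm_placement.h>
#include <drm/ttm/ttm_resource.h>

struct demo_gtt_mgr {
    struct ttm_resource_manager manager;  /* embedded TTM manager, as in amdgpu_gtt_mgr */
};

static void demo_gtt_mgr_init(struct ttm_device *bdev,
                              struct demo_gtt_mgr *mgr, uint64_t gtt_size)
{
    /* Size the manager and hand it to TTM for the TT (GTT) placement.
     * A real driver also sets mgr->manager.func to its alloc/free ops. */
    ttm_resource_manager_init(&mgr->manager, bdev, gtt_size);
    ttm_set_driver_manager(bdev, TTM_PL_TT, &mgr->manager);
    ttm_resource_manager_set_used(&mgr->manager, true);
}

static int demo_gtt_mgr_fini(struct ttm_device *bdev, struct demo_gtt_mgr *mgr)
{
    struct ttm_resource_manager *man = &mgr->manager;
    int ret;

    ttm_resource_manager_set_used(man, false);

    /* Evict every resource still backed by this manager before cleanup. */
    ret = ttm_resource_manager_evict_all(bdev, man);
    if (ret)
        return ret;

    ttm_resource_manager_cleanup(man);
    ttm_set_driver_manager(bdev, TTM_PL_TT, NULL);
    return 0;
}
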
amdgpu_discovery.c
    250  discv_regn = memremap(pos, adev->mman.discovery_tmr_size, MEMREMAP_WC);  in amdgpu_discovery_read_binary_from_sysmem()
    252  memcpy(binary, discv_regn, adev->mman.discovery_tmr_size);  in amdgpu_discovery_read_binary_from_sysmem()
    292  adev->mman.discovery_tmr_size, false);  in amdgpu_discovery_read_binary_from_mem()
    390  (struct nps_info_header *)(adev->mman.discovery_bin + offset);  in amdgpu_discovery_verify_npsinfo()
    397  if (!amdgpu_discovery_verify_checksum(adev->mman.discovery_bin + offset,  in amdgpu_discovery_verify_npsinfo()
    416  adev->mman.discovery_tmr_size = DISCOVERY_TMR_SIZE;  in amdgpu_discovery_init()
    417  adev->mman.discovery_bin = kzalloc(adev->mman.discovery_tmr_size, GFP_KERNEL);  in amdgpu_discovery_init()
    418  if (!adev->mman.discovery_bin)  in amdgpu_discovery_init()
    424  r = amdgpu_discovery_read_binary_from_file(adev, adev->mman.discovery_bin);  in amdgpu_discovery_init()
    434  adev, adev->mman.discovery_bin);  in amdgpu_discovery_init()
    [all …]

amdgpu_virt.c
    367  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_virt_ras_reserve_bps()
    385  amdgpu_vram_mgr_reserve_range(&adev->mman.vram_mgr,  in amdgpu_virt_ras_reserve_bps()
    426  if (adev->mman.fw_vram_usage_va)  in amdgpu_virt_add_bad_page()
    427  vram_usage_va = adev->mman.fw_vram_usage_va;  in amdgpu_virt_add_bad_page()
    429  vram_usage_va = adev->mman.drv_vram_usage_va;  in amdgpu_virt_add_bad_page()
    602  ttm_resource_manager_usage(&adev->mman.vram_mgr.manager) >> 20;  in amdgpu_virt_write_vf2pf_data()
    604  amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr) >> 20;  in amdgpu_virt_write_vf2pf_data()
    676  if (adev->mman.fw_vram_usage_va && adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    678  } else if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {  in amdgpu_virt_init_data_exchange()
    701  if (adev->mman.fw_vram_usage_va || adev->mman.drv_vram_usage_va) {  in amdgpu_virt_exchange_data()
    [all …]

amdgpu_gmc.c
    617  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in amdgpu_gmc_flush_gpu_tlb()
    624  !adev->mman.buffer_funcs_enabled || !adev->ib_pool_ready ||  in amdgpu_gmc_flush_gpu_tlb()
    652  mutex_lock(&adev->mman.gtt_window_lock);  in amdgpu_gmc_flush_gpu_tlb()
    653  r = amdgpu_job_alloc_with_ib(ring->adev, &adev->mman.high_pr,  in amdgpu_gmc_flush_gpu_tlb()
    665  mutex_unlock(&adev->mman.gtt_window_lock);  in amdgpu_gmc_flush_gpu_tlb()
    673  mutex_unlock(&adev->mman.gtt_window_lock);  in amdgpu_gmc_flush_gpu_tlb()
    932  adev->mman.stolen_reserved_offset = 0;  in amdgpu_gmc_get_vbios_allocations()
    933  adev->mman.stolen_reserved_size = 0;  in amdgpu_gmc_get_vbios_allocations()
    945  adev->mman.keep_stolen_vga_memory = true;  in amdgpu_gmc_get_vbios_allocations()
    951  adev->mman.stolen_reserved_offset = 0x500000;  in amdgpu_gmc_get_vbios_allocations()
    [all …]

amdgpu_vram_mgr.c
     52  return container_of(mgr, struct amdgpu_device, mman.vram_mgr);  in to_amdgpu_device()
    141  struct ttm_resource_manager *man = &adev->mman.vram_mgr.manager;  in amdgpu_mem_info_vram_used_show()
    162  amdgpu_vram_mgr_vis_usage(&adev->mman.vram_mgr));  in amdgpu_mem_info_vis_vram_used_show()
    907  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_init()
    911  ttm_resource_manager_init(man, &adev->mman.bdev,  in amdgpu_vram_mgr_init()
    930  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);  in amdgpu_vram_mgr_init()
    945  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;  in amdgpu_vram_mgr_fini()
    952  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);  in amdgpu_vram_mgr_fini()
    969  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL);  in amdgpu_vram_mgr_fini()

psp_v11_0.c
    511  if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) {  in psp_v11_0_memory_training()
    514  adev->mman.aper_base_kaddr);  in psp_v11_0_memory_training()
    525  memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz);  in psp_v11_0_memory_training()
    534  memcpy_toio(adev->mman.aper_base_kaddr, buf, sz);  in psp_v11_0_memory_training()

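The psp_v11_0.c, psp_v13_0.c and psp_v14_0.c matches all follow the same memory-training save/restore: copy the first sz bytes of visible VRAM out through the CPU-visible aperture mapping (mman.aper_base_kaddr), let training clobber that region, then copy the bytes back with memcpy_toio(). A hedged sketch of that sequence; the buffer handling and the do_training callback are invented here for illustration.

#include <linux/errno.h>
#include <linux/io.h>
#include <linux/slab.h>

static int demo_memory_training(void __iomem *aper_base_kaddr, size_t sz,
                                int (*do_training)(void))
{
    void *buf;
    int ret;

    if (!aper_base_kaddr)
        return -EINVAL;

    buf = kvmalloc(sz, GFP_KERNEL);
    if (!buf)
        return -ENOMEM;

    memcpy_fromio(buf, aper_base_kaddr, sz);   /* save the VRAM window */
    ret = do_training();                       /* training overwrites it */
    memcpy_toio(aper_base_kaddr, buf, sz);     /* restore the saved bytes */

    kvfree(buf);
    return ret;
}
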
amdgpu_benchmark.c
     40  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in amdgpu_benchmark_do_move()
    101  if (adev->mman.buffer_funcs) {  in amdgpu_benchmark_move()

psp_v14_0.c
    467  if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) {  in psp_v14_0_memory_training()
    470  adev->mman.aper_base_kaddr);  in psp_v14_0_memory_training()
    481  memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz);  in psp_v14_0_memory_training()
    490  memcpy_toio(adev->mman.aper_base_kaddr, buf, sz);  in psp_v14_0_memory_training()

amdgpu_atomfirmware.c
    123  adev->mman.fw_vram_usage_start_offset = (start_addr &  in amdgpu_atomfirmware_allocate_fb_v2_1()
    125  adev->mman.fw_vram_usage_size = fw_size << 10;  in amdgpu_atomfirmware_allocate_fb_v2_1()
    155  adev->mman.fw_vram_usage_start_offset = (fw_start_addr &  in amdgpu_atomfirmware_allocate_fb_v2_2()
    157  adev->mman.fw_vram_usage_size = fw_size << 10;  in amdgpu_atomfirmware_allocate_fb_v2_2()
    164  adev->mman.drv_vram_usage_start_offset = (drv_start_addr &  in amdgpu_atomfirmware_allocate_fb_v2_2()
    166  adev->mman.drv_vram_usage_size = drv_size << 10;  in amdgpu_atomfirmware_allocate_fb_v2_2()

psp_v13_0.c
    589  if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) {  in psp_v13_0_memory_training()
    592  adev->mman.aper_base_kaddr);  in psp_v13_0_memory_training()
    603  memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz);  in psp_v13_0_memory_training()
    612  memcpy_toio(adev->mman.aper_base_kaddr, buf, sz);  in psp_v13_0_memory_training()

/linux/drivers/gpu/drm/qxl/

qxl_ttm.c
     42  struct qxl_mman *mman;  in qxl_get_qdev() local
     45  mman = container_of(bdev, struct qxl_mman, bdev);  in qxl_get_qdev()
     46  qdev = container_of(mman, struct qxl_device, mman);  in qxl_get_qdev()
    187  return ttm_range_man_init(&qdev->mman.bdev, type, false, size);  in qxl_ttm_init_mem_type()
    196  r = ttm_device_init(&qdev->mman.bdev, &qxl_bo_driver, NULL,  in qxl_ttm_init()
    228  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM);  in qxl_ttm_fini()
    229  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV);  in qxl_ttm_fini()
    230  ttm_device_fini(&qdev->mman.bdev);  in qxl_ttm_fini()
    237  ttm_resource_manager_create_debugfs(ttm_manager_type(&qdev->mman.bdev,  in qxl_ttm_debugfs_init()
    240  ttm_resource_manager_create_debugfs(ttm_manager_type(&qdev->mman.bdev,  in qxl_ttm_debugfs_init()

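qxl_get_qdev() (lines 42-46 above) recovers the qxl_device from an embedded ttm_device by applying container_of() twice: bdev -> qxl_mman -> qxl_device. A self-contained illustration of that two-hop idiom, using stand-in struct names rather than the real qxl types.

#include <stddef.h>
#include <stdio.h>

/* Userspace stand-in for the kernel's container_of(). */
#define container_of(ptr, type, member) \
    ((type *)((char *)(ptr) - offsetof(type, member)))

struct demo_mman   { int bdev; };                        /* stands in for qxl_mman / ttm_device */
struct demo_device { int id; struct demo_mman mman; };   /* stands in for qxl_device */

static struct demo_device *demo_get_qdev(int *bdev)
{
    /* Two hops, mirroring qxl_get_qdev(): bdev -> mman -> device. */
    struct demo_mman *mman = container_of(bdev, struct demo_mman, bdev);

    return container_of(mman, struct demo_device, mman);
}

int main(void)
{
    struct demo_device dev = { .id = 42 };

    printf("id = %d\n", demo_get_qdev(&dev.mman.bdev)->id);
    return 0;
}
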
qxl_object.c
    139  r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, type,  in qxl_bo_create()
    408  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_PRIV);  in qxl_surf_evict()
    409  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);  in qxl_surf_evict()
    416  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_VRAM);  in qxl_vram_evict()
    417  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);  in qxl_vram_evict()

/linux/tools/perf/trace/beauty/

mmap_flags.sh
     15  linux_mman=${linux_header_dir}/mman.h
     16  arch_mman=${arch_header_dir}/mman.h
     34  (grep -E $regex ${header_dir}/mman-common.h | \
     40  (grep -E $regex ${header_dir}/mman.h | \

mmap_prot.sh
     13  common_mman=${asm_header_dir}/mman-common.h
     14  arch_mman=${arch_header_dir}/mman.h

madvise_behavior.sh
      8  grep -E $regex ${header_dir}/mman-common.h | \

pkey_alloc_access_rights.sh
      8  grep -E $regex ${header_dir}/mman-common.h | \

mremap_flags.sh
     10  linux_mman=${linux_header_dir}/mman.h

/linux/tools/perf/

check-headers.sh
     72  "include/uapi/asm-generic/mman-common.h"
    193  check include/uapi/asm-generic/mman.h '-I "^#include <\(uapi/\)*asm-generic/mman-common\(-tools\)*.h>"'
    194  check include/uapi/linux/mman.h '-I "^#include <\(uapi/\)*asm/mman.h>"'

Makefile.perf
    568  $(pkey_alloc_access_rights_array): $(asm_generic_hdr_dir)/mman-common.h $(pkey_alloc_access_rights_…
    628  $(madvise_behavior_array): $(madvise_hdr_dir)/mman-common.h $(madvise_behavior_tbl)
    634  $(mmap_flags_array): $(linux_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir…
    640  $(mremap_flags_array): $(linux_uapi_dir)/mman.h $(mremap_flags_tbl)
    658  $(mmap_prot_array): $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman-common.h $(mmap_pro…

/linux/drivers/gpu/drm/i915/selftests/

i915_live_selftests.h
     33  selftest(mman, i915_gem_mman_live_selftests)

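i915_live_selftests.h registers each live test with a selftest(name, func) entry; the file that includes it defines the selftest() macro so the same list expands into tables, enums and declarations. A small stand-alone sketch of that X-macro pattern, with hypothetical entries rather than the real i915 selftest functions.

#include <stdio.h>

static int demo_mman(void)  { return 0; }   /* stand-in for i915_gem_mman_live_selftests */
static int demo_evict(void) { return 0; }   /* hypothetical second entry */

/* The list itself; in i915 it lives in a header that gets #included. */
#define SELFTEST_LIST \
    selftest(mman,  demo_mman) \
    selftest(evict, demo_evict)

struct selftest_entry { const char *name; int (*run)(void); };

/* One expansion of the list: build a name/function table. */
#define selftest(n, f) { #n, f },
static const struct selftest_entry selftests[] = { SELFTEST_LIST };
#undef selftest

int main(void)
{
    for (unsigned int i = 0; i < sizeof(selftests) / sizeof(selftests[0]); i++)
        printf("%s -> %d\n", selftests[i].name, selftests[i].run());
    return 0;
}
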
/linux/Documentation/userspace-api/media/dvb/

dmx-munmap.rst
     23  #include <sys/mman.h>

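Both munmap pages (dmx-munmap.rst here and func-munmap.rst further down) document ordinary munmap(2) from <sys/mman.h>. A minimal userspace sketch of the mmap()/munmap() pairing, using an anonymous mapping in place of a driver-provided buffer.

#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>

int main(void)
{
    const size_t length = 4096;  /* normally the buffer length reported by the driver */
    void *p;

    /* The DVB/V4L pages apply the same calls to a device fd instead of
     * an anonymous mapping. */
    p = mmap(NULL, length, PROT_READ | PROT_WRITE,
             MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED) {
        perror("mmap");
        return EXIT_FAILURE;
    }

    /* ... use the mapping ... */

    if (munmap(p, length) == -1) {
        perror("munmap");
        return EXIT_FAILURE;
    }
    return EXIT_SUCCESS;
}
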
/linux/include/uapi/asm-generic/

Kbuild
     15  mandatory-y += mman.h

/linux/drivers/gpu/drm/amd/amdkfd/

kfd_migrate.c
     64  num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8);  in svm_migrate_gart_map()
     67  r = amdgpu_job_alloc_with_ib(adev, &adev->mman.high_pr,  in svm_migrate_gart_map()
    130  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;  in svm_migrate_copy_memory_gart()
    136  mutex_lock(&adev->mman.gtt_window_lock);  in svm_migrate_copy_memory_gart()
    172  mutex_unlock(&adev->mman.gtt_window_lock);  in svm_migrate_copy_memory_gart()

/linux/Documentation/userspace-api/media/v4l/

func-munmap.rst
     21  #include <sys/mman.h>