Lines matching full:gmc (all hits below are in drivers/gpu/drm/amd/amdgpu/amdgpu_gmc.c)
53 u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes; in amdgpu_gmc_pdb0_alloc()
54 uint32_t pde0_page_shift = adev->gmc.vmid0_page_table_block_size + 21; in amdgpu_gmc_pdb0_alloc()
67 r = amdgpu_bo_create(adev, &bp, &adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
71 r = amdgpu_bo_reserve(adev->gmc.pdb0_bo, false); in amdgpu_gmc_pdb0_alloc()
75 r = amdgpu_bo_pin(adev->gmc.pdb0_bo, AMDGPU_GEM_DOMAIN_VRAM); in amdgpu_gmc_pdb0_alloc()
78 r = amdgpu_bo_kmap(adev->gmc.pdb0_bo, &adev->gmc.ptr_pdb0); in amdgpu_gmc_pdb0_alloc()
82 amdgpu_bo_unreserve(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
86 amdgpu_bo_unpin(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
88 amdgpu_bo_unreserve(adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
90 amdgpu_bo_unref(&adev->gmc.pdb0_bo); in amdgpu_gmc_pdb0_alloc()
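The amdgpu_gmc_pdb0_alloc() hits above (file lines 53-90) size the VMID0 page directory from the XGMI segment layout (the same math shows up again in amdgpu_gmc_init_pdb0() further down; see the sketch after file line 1040) and then walk the usual buffer-object bring-up: create, reserve, pin, kmap, unreserve on success, with each error path undoing only the steps that already succeeded. A minimal standalone sketch of that acquire/unwind ordering, with every driver call replaced by a stub:

/* Standalone sketch of the acquire/unwind ordering visible in
 * amdgpu_gmc_pdb0_alloc(); all calls below are stubs, not driver APIs. */
#include <stdio.h>

static int bo_create(void)     { puts("create");    return 0; }
static int bo_reserve(void)    { puts("reserve");   return 0; }
static int bo_pin(void)        { puts("pin");       return 0; }
static int bo_kmap(void)       { puts("kmap");      return 0; }
static void bo_unreserve(void) { puts("unreserve"); }
static void bo_unpin(void)     { puts("unpin"); }
static void bo_unref(void)     { puts("unref"); }

static int pdb0_alloc_model(void)
{
	int r;

	r = bo_create();
	if (r)
		return r;
	r = bo_reserve();
	if (r)
		goto err_unref;
	r = bo_pin();
	if (r)
		goto err_unreserve;
	r = bo_kmap();
	if (r)
		goto err_unpin;

	bo_unreserve();            /* success: keep the BO pinned and mapped */
	return 0;

err_unpin:
	bo_unpin();
err_unreserve:
	bo_unreserve();
err_unref:
	bo_unref();
	return r;
}

int main(void)
{
	return pdb0_alloc_model();
}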
190 if (bo->ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size) in amdgpu_gmc_agp_addr()
193 return adev->gmc.agp_start + bo->ttm->dma_address[0]; in amdgpu_gmc_agp_addr()
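amdgpu_gmc_agp_addr() (file lines 190-193) checks whether the first DMA page falls inside the AGP aperture before returning agp_start + dma_address[0]. A standalone model of that check; the page size, the invalid-offset sentinel, and the example aperture below are stand-ins:

/* Standalone model of the bounds check in amdgpu_gmc_agp_addr();
 * MODEL_PAGE_SIZE and MODEL_INVALID_OFFSET are stand-ins, not driver macros. */
#include <stdint.h>
#include <stdio.h>

#define MODEL_PAGE_SIZE      4096ULL
#define MODEL_INVALID_OFFSET (~0ULL)

static uint64_t agp_addr_model(uint64_t dma_address0,
			       uint64_t agp_start, uint64_t agp_size)
{
	/* the system page must map inside the [agp_start, agp_start + agp_size) window */
	if (dma_address0 + MODEL_PAGE_SIZE >= agp_size)
		return MODEL_INVALID_OFFSET;

	/* GPU address = aperture base + system DMA address */
	return agp_start + dma_address0;
}

int main(void)
{
	uint64_t agp_start = 0, agp_size = 1ULL << 28;   /* example: 256 MiB window */

	printf("%#llx\n", (unsigned long long)
	       agp_addr_model(0x100000, agp_start, agp_size));   /* fits */
	printf("%#llx\n", (unsigned long long)
	       agp_addr_model(agp_size, agp_start, agp_size));   /* rejected */
	return 0;
}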
282 u64 max_mc_address = min(adev->gmc.mc_mask, AMDGPU_GMC_HOLE_START - 1); in amdgpu_gmc_gart_location()
405 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_filter_faults() local
417 if (gmc->fault_ring[gmc->last_fault].timestamp >= stamp) in amdgpu_gmc_filter_faults()
422 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx]; in amdgpu_gmc_filter_faults()
443 fault = &gmc->fault_ring[fault->next]; in amdgpu_gmc_filter_faults()
451 fault = &gmc->fault_ring[gmc->last_fault]; in amdgpu_gmc_filter_faults()
456 fault->next = gmc->fault_hash[hash].idx; in amdgpu_gmc_filter_faults()
457 gmc->fault_hash[hash].idx = gmc->last_fault++; in amdgpu_gmc_filter_faults()
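The amdgpu_gmc_filter_faults() hits (file lines 405-457) show the shape of the retry-fault filter: a hash table whose buckets hold indices into a small ring of recent faults, entries chained through ->next, and new faults recycling ring slots in order via last_fault. Below is a simplified, self-contained toy of that structure; the real code hashes an address/pasid key with hash_64() and uses fixed ring/hash orders, so the sizes, the modulo hash, and the timestamp window here are stand-ins:

/* Toy model of the hash-into-ring fault filter used by
 * amdgpu_gmc_filter_faults(); sizes, hash, and window are stand-ins. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 256u
#define HASH_SIZE 1024u
#define WINDOW    5000ULL   /* "recent" window, akin to the driver's fault timeout */

struct fault_entry {
	uint64_t key;        /* the driver packs addr/pasid into this */
	uint64_t timestamp;
	uint16_t next;       /* next (older) entry in the same bucket */
};

struct fault_filter {
	struct fault_entry ring[RING_SIZE];
	uint16_t hash[HASH_SIZE];   /* bucket head: index into ring */
	uint16_t last_fault;        /* next ring slot to recycle */
};

/* returns true if an equal key was seen within the window (drop the fault) */
static bool filter_fault(struct fault_filter *f, uint64_t key,
			 uint64_t timestamp)
{
	uint64_t stamp = (timestamp > WINDOW ? timestamp : WINDOW + 1) - WINDOW;
	uint32_t h = (uint32_t)(key % HASH_SIZE);
	struct fault_entry *e = &f->ring[f->hash[h]];

	/* walk the bucket chain while entries are still recent */
	while (e->timestamp >= stamp) {
		uint64_t prev = e->timestamp;

		if (e->key == key)
			return true;            /* recent duplicate */

		e = &f->ring[e->next];
		if (e->timestamp >= prev)       /* slot was recycled: stop */
			break;
	}

	/* new fault: recycle the next ring slot and make it the bucket head
	 * (the driver additionally refuses to recycle a still-recent slot;
	 * that check is omitted here) */
	e = &f->ring[f->last_fault];
	e->key = key;
	e->timestamp = timestamp;
	e->next = f->hash[h];
	f->hash[h] = f->last_fault;
	f->last_fault = (uint16_t)((f->last_fault + 1) % RING_SIZE);
	return false;
}

int main(void)
{
	static struct fault_filter f;   /* zero-initialized */

	printf("%d\n", filter_fault(&f, 0x1234, 100));    /* 0: first occurrence */
	printf("%d\n", filter_fault(&f, 0x1234, 150));    /* 1: repeat inside window */
	printf("%d\n", filter_fault(&f, 0x1234, 99999));  /* 0: window expired */
	return 0;
}

The appeal of the ring-plus-hash layout is bounded memory and O(1) recycling: old faults age out simply by being overwritten, with no separate eviction pass.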
474 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_filter_faults_remove() local
495 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx]; in amdgpu_gmc_filter_faults_remove()
507 fault = &gmc->fault_ring[fault->next]; in amdgpu_gmc_filter_faults_remove()
633 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb()
634 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
637 if (adev->gmc.flush_tlb_needs_extra_type_0 && flush_type == 2) in amdgpu_gmc_flush_gpu_tlb()
638 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, in amdgpu_gmc_flush_gpu_tlb()
641 adev->gmc.gmc_funcs->flush_gpu_tlb(adev, vmid, vmhub, in amdgpu_gmc_flush_gpu_tlb()
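amdgpu_gmc_flush_gpu_tlb() (file lines 633-641) brackets the requested flush with extra flushes on ASICs that need them: an extra flush first when flush_tlb_needs_extra_type_2 is set, another when a type-2 flush was requested and flush_tlb_needs_extra_type_0 is set, and finally the flush that was actually asked for. The pasid variant (file lines 696-730) applies the same rules, either directly or via KIQ packets. The flush-type arguments are truncated in the matches above, so the 2 and 0 below are inferred from the flag names; everything else is stubbed:

/* Standalone model of the flush ordering in amdgpu_gmc_flush_gpu_tlb();
 * the flag names mirror the driver, but the callback is a stub. */
#include <stdbool.h>
#include <stdio.h>

struct gmc_model {
	bool flush_tlb_needs_extra_type_0;
	bool flush_tlb_needs_extra_type_2;
	void (*flush_gpu_tlb)(unsigned int vmid, unsigned int vmhub,
			      unsigned int flush_type);
};

static void stub_flush(unsigned int vmid, unsigned int vmhub,
		       unsigned int flush_type)
{
	printf("flush vmid=%u vmhub=%u type=%u\n", vmid, vmhub, flush_type);
}

static void flush_gpu_tlb_model(struct gmc_model *gmc, unsigned int vmid,
				unsigned int vmhub, unsigned int flush_type)
{
	/* some ASICs need an extra type-2 flush issued first */
	if (gmc->flush_tlb_needs_extra_type_2)
		gmc->flush_gpu_tlb(vmid, vmhub, 2);

	/* and some need an extra type-0 flush before a requested type-2 flush */
	if (gmc->flush_tlb_needs_extra_type_0 && flush_type == 2)
		gmc->flush_gpu_tlb(vmid, vmhub, 0);

	/* finally, the flush that was actually requested */
	gmc->flush_gpu_tlb(vmid, vmhub, flush_type);
}

int main(void)
{
	struct gmc_model gmc = { true, true, stub_flush };

	flush_gpu_tlb_model(&gmc, 0, 0, 2);
	return 0;
}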
696 if (!adev->gmc.flush_pasid_uses_kiq || !ring->sched.ready) { in amdgpu_gmc_flush_gpu_tlb_pasid()
697 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
698 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
702 if (adev->gmc.flush_tlb_needs_extra_type_0 && flush_type == 2) in amdgpu_gmc_flush_gpu_tlb_pasid()
703 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
707 adev->gmc.gmc_funcs->flush_gpu_tlb_pasid(adev, pasid, in amdgpu_gmc_flush_gpu_tlb_pasid()
715 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
718 if (adev->gmc.flush_tlb_needs_extra_type_0) in amdgpu_gmc_flush_gpu_tlb_pasid()
727 if (adev->gmc.flush_tlb_needs_extra_type_2) in amdgpu_gmc_flush_gpu_tlb_pasid()
730 if (flush_type == 2 && adev->gmc.flush_tlb_needs_extra_type_0) in amdgpu_gmc_flush_gpu_tlb_pasid()
828 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
832 adev->gmc.tmz_enabled = true; in amdgpu_gmc_tmz_set()
857 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
861 adev->gmc.tmz_enabled = true; in amdgpu_gmc_tmz_set()
867 adev->gmc.tmz_enabled = false; in amdgpu_gmc_tmz_set()
883 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_noretry_set() local
894 gmc->noretry = 1; in amdgpu_gmc_noretry_set()
896 gmc->noretry = (amdgpu_noretry == -1) ? noretry_default : amdgpu_noretry; in amdgpu_gmc_noretry_set()
976 if ((adev->gmc.real_vram_size - size) < (8 * 1024 * 1024)) in amdgpu_gmc_get_vbios_allocations()
1010 u64 vram_size = adev->gmc.xgmi.node_segment_size * adev->gmc.xgmi.num_physical_nodes; in amdgpu_gmc_init_pdb0()
1011 u64 pde0_page_size = (1ULL<<adev->gmc.vmid0_page_table_block_size)<<21; in amdgpu_gmc_init_pdb0()
1013 adev->gmc.xgmi.physical_node_id * adev->gmc.xgmi.node_segment_size; in amdgpu_gmc_init_pdb0()
1024 flags |= AMDGPU_PTE_FRAG((adev->gmc.vmid0_page_table_block_size + 9*1)); in amdgpu_gmc_init_pdb0()
1031 amdgpu_gmc_set_pte_pde(adev, adev->gmc.ptr_pdb0, i, vram_addr, flags); in amdgpu_gmc_init_pdb0()
1040 amdgpu_gmc_set_pte_pde(adev, adev->gmc.ptr_pdb0, i, gart_ptb_gpu_pa, flags); in amdgpu_gmc_init_pdb0()
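amdgpu_gmc_init_pdb0() (file lines 1010-1040) fills the PDB0 so that each entry maps one pde0_page_size chunk of the hive's VRAM, then writes one final entry pointing at the GART page-table block; the AMDGPU_PTE_FRAG value of vmid0_page_table_block_size + 9 would, under the usual 4 KiB << frag encoding, mark fragments exactly one PDE0 page in size. A standalone sketch of the sizing math, with illustrative (not real) values:

/* Standalone model of the PDB0 sizing math in amdgpu_gmc_init_pdb0();
 * all values are illustrative, not real hardware configuration. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t node_segment_size = 32ULL << 30;   /* example: 32 GiB per node */
	uint64_t num_physical_nodes = 4;            /* example: 4-node hive */
	uint32_t vmid0_page_table_block_size = 0;   /* 0 => 2 MiB PDE0 pages */

	uint64_t vram_size = node_segment_size * num_physical_nodes;
	uint64_t pde0_page_size =
		(1ULL << vmid0_page_table_block_size) << 21;

	/* one PDE0 entry per pde0_page_size chunk of the hive's VRAM ... */
	uint64_t n_vram_entries = vram_size / pde0_page_size;

	/* ... plus one trailing entry for the GART page-table block */
	printf("%llu VRAM entries of %llu MiB each, GART PTB entry at index %llu\n",
	       (unsigned long long)n_vram_entries,
	       (unsigned long long)(pde0_page_size >> 20),
	       (unsigned long long)n_vram_entries);
	return 0;
}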
1053 return mc_addr - adev->gmc.vram_start + adev->vm_manager.vram_base_offset; in amdgpu_gmc_vram_mc2pa()
1139 for_each_inst(mode, adev->gmc.supported_nps_modes) { in available_memory_partition_show()
1159 for_each_inst(i, adev->gmc.supported_nps_modes) { in current_memory_partition_store()
1169 if (mode == adev->gmc.gmc_funcs->query_mem_partition_mode(adev)) { in current_memory_partition_store()
1184 adev->gmc.requested_nps_mode = mode; in current_memory_partition_store()
1201 mode = adev->gmc.gmc_funcs->query_mem_partition_mode(adev); in current_memory_partition_show()
1217 if (!adev->gmc.gmc_funcs->query_mem_partition_mode) in amdgpu_gmc_sysfs_init()
1220 nps_switch_support = (hweight32(adev->gmc.supported_nps_modes & in amdgpu_gmc_sysfs_init()
1238 if (!adev->gmc.gmc_funcs->query_mem_partition_mode) in amdgpu_gmc_sysfs_fini()
1258 (adev->gmc.reset_flags & AMDGPU_GMC_INIT_RESET_NPS); in amdgpu_gmc_get_nps_memranges()
1346 return (((BIT(req_nps_mode) & adev->gmc.supported_nps_modes) == in amdgpu_gmc_need_nps_switch_req()
1356 if (amdgpu_sriov_vf(adev) || !adev->gmc.supported_nps_modes || in amdgpu_gmc_prepare_nps_mode_change()
1357 !adev->gmc.gmc_funcs->request_mem_partition_mode) in amdgpu_gmc_prepare_nps_mode_change()
1360 cur_nps_mode = adev->gmc.gmc_funcs->query_mem_partition_mode(adev); in amdgpu_gmc_prepare_nps_mode_change()
1374 req_nps_mode = adev->gmc.requested_nps_mode; in amdgpu_gmc_prepare_nps_mode_change()
1379 r = adev->gmc.gmc_funcs->request_mem_partition_mode(adev, req_nps_mode); in amdgpu_gmc_prepare_nps_mode_change()
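The NPS matches (file lines 1139-1379) revolve around a bitmask of supported partition modes: sysfs only exposes a switch when more than one mode is set (the hweight32() check at file line 1220, truncated above), and a new mode is requested only when its bit is present in supported_nps_modes and it differs from the current mode (file line 1346, also truncated; the model below completes the expression the way the helper's name suggests). Mode numbers here are illustrative:

/* Standalone model of the NPS-switch checks seen in
 * amdgpu_gmc_need_nps_switch_req() and amdgpu_gmc_sysfs_init();
 * BIT() and hweight32_model() are local stand-ins for the kernel helpers. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define BIT(n) (1u << (n))

/* population count, stand-in for the kernel's hweight32() */
static unsigned int hweight32_model(uint32_t v)
{
	unsigned int n = 0;

	while (v) {
		n += v & 1;
		v >>= 1;
	}
	return n;
}

static bool need_nps_switch(uint32_t supported_modes,
			    unsigned int cur_mode, unsigned int req_mode)
{
	/* requested mode must be advertised and differ from the current one */
	return (BIT(req_mode) & supported_modes) == BIT(req_mode) &&
	       req_mode != cur_mode;
}

int main(void)
{
	uint32_t supported = BIT(1) | BIT(4);   /* e.g. NPS1 and NPS4 */

	/* sysfs only offers switching when more than one mode is supported */
	printf("switchable: %d\n", hweight32_model(supported) > 1);
	printf("NPS1 -> NPS4: %d\n", need_nps_switch(supported, 1, 4));
	printf("NPS1 -> NPS1: %d\n", need_nps_switch(supported, 1, 1));
	printf("NPS1 -> NPS2: %d\n", need_nps_switch(supported, 1, 2));
	return 0;
}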
1391 if (adev->gmc.gmc_funcs->need_reset_on_init) in amdgpu_gmc_need_reset_on_init()
1392 return adev->gmc.gmc_funcs->need_reset_on_init(adev); in amdgpu_gmc_need_reset_on_init()