
Searched refs:gart (Results 1 – 25 of 49) sorted by relevance


/linux/drivers/gpu/drm/radeon/
radeon_gart.c
75 ptr = dma_alloc_coherent(&rdev->pdev->dev, rdev->gart.table_size, in radeon_gart_table_ram_alloc()
76 &rdev->gart.table_addr, GFP_KERNEL); in radeon_gart_table_ram_alloc()
84 rdev->gart.table_size >> PAGE_SHIFT); in radeon_gart_table_ram_alloc()
87 rdev->gart.ptr = ptr; in radeon_gart_table_ram_alloc()
102 if (!rdev->gart.ptr) in radeon_gart_table_ram_free()
108 set_memory_wb((unsigned long)rdev->gart.ptr, in radeon_gart_table_ram_free()
109 rdev->gart.table_size >> PAGE_SHIFT); in radeon_gart_table_ram_free()
112 dma_free_coherent(&rdev->pdev->dev, rdev->gart.table_size, in radeon_gart_table_ram_free()
113 (void *)rdev->gart.ptr, rdev->gart.table_addr); in radeon_gart_table_ram_free()
114 rdev->gart.ptr = NULL; in radeon_gart_table_ram_free()
[all …]
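
The radeon_gart.c matches above are the system-RAM flavour of the page-table allocator: the table is taken from coherent DMA memory, and the free path hands it back through the same API. Below is a minimal sketch of that alloc/free pattern, using a cut-down stand-in structure rather than the real struct radeon_device (only the three gart fields visible in the snippets are kept).

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

/* Simplified stand-in for the gart fields of struct radeon_device. */
struct gart_table {
	size_t table_size;       /* num_gpu_pages * entry size, in bytes */
	void *ptr;               /* CPU pointer to the table */
	dma_addr_t table_addr;   /* bus address programmed into the GPU */
};

static int gart_table_ram_alloc(struct device *dev, struct gart_table *gart)
{
	gart->ptr = dma_alloc_coherent(dev, gart->table_size,
				       &gart->table_addr, GFP_KERNEL);
	return gart->ptr ? 0 : -ENOMEM;
}

static void gart_table_ram_free(struct device *dev, struct gart_table *gart)
{
	if (!gart->ptr)
		return;
	/* The real radeon_gart_table_ram_free() also restores write-back
	 * caching with set_memory_wb() on x86 before freeing. */
	dma_free_coherent(dev, gart->table_size, gart->ptr, gart->table_addr);
	gart->ptr = NULL;
}
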
rs400.c
85 if (rdev->gart.ptr) { in rs400_gart_init()
107 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in rs400_gart_init()
165 tmp = (u32)rdev->gart.table_addr & 0xfffff000; in rs400_gart_enable()
166 tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4; in rs400_gart_enable()
193 (unsigned long long)rdev->gart.table_addr); in rs400_gart_enable()
194 rdev->gart.ready = true; in rs400_gart_enable()
237 u32 *gtt = rdev->gart.ptr; in rs400_gart_set_page()
radeon_asic.c
166 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; in radeon_agp_disable()
167 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; in radeon_agp_disable()
168 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; in radeon_agp_disable()
172 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; in radeon_agp_disable()
173 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; in radeon_agp_disable()
174 rdev->asic->gart.set_page = &r100_pci_gart_set_page; in radeon_agp_disable()
208 .gart = {
276 .gart = {
372 .gart = {
440 .gart = {
[all …]
r300.c
122 void __iomem *ptr = rdev->gart.ptr; in rv370_pcie_gart_set_page()
134 if (rdev->gart.robj) { in rv370_pcie_gart_init()
144 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in rv370_pcie_gart_init()
145 rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush; in rv370_pcie_gart_init()
146 rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry; in rv370_pcie_gart_init()
147 rdev->asic->gart.set_page = &rv370_pcie_gart_set_page; in rv370_pcie_gart_init()
157 if (rdev->gart.robj == NULL) { in rv370_pcie_gart_enable()
172 table_addr = rdev->gart.table_addr; in rv370_pcie_gart_enable()
187 rdev->gart.ready = true; in rv370_pcie_gart_enable()
rs600.c
549 if (rdev->gart.robj) { in rs600_gart_init()
558 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; in rs600_gart_init()
567 if (rdev->gart.robj == NULL) { in rs600_gart_enable()
604 rdev->gart.table_addr); in rs600_gart_enable()
621 (unsigned long long)rdev->gart.table_addr); in rs600_gart_enable()
622 rdev->gart.ready = true; in rs600_gart_enable()
662 void __iomem *ptr = (void *)rdev->gart.ptr; in rs600_gart_set_page()
r100.c
656 if (rdev->gart.ptr) { in r100_pci_gart_init()
664 rdev->gart.table_size = rdev->gart.num_gpu_pages * 4; in r100_pci_gart_init()
665 rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush; in r100_pci_gart_init()
666 rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry; in r100_pci_gart_init()
667 rdev->asic->gart.set_page = &r100_pci_gart_set_page; in r100_pci_gart_init()
682 WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr); in r100_pci_gart_enable()
688 (unsigned long long)rdev->gart.table_addr); in r100_pci_gart_enable()
689 rdev->gart.ready = true; in r100_pci_gart_enable()
712 u32 *gtt = rdev->gart.ptr; in r100_pci_gart_set_page()
rv770.c
899 if (rdev->gart.robj == NULL) { in rv770_pcie_gart_enable()
928 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in rv770_pcie_gart_enable()
939 (unsigned long long)rdev->gart.table_addr); in rv770_pcie_gart_enable()
940 rdev->gart.ready = true; in rv770_pcie_gart_enable()
ni.c
1252 if (rdev->gart.robj == NULL) { in cayman_pcie_gart_enable()
1281 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in cayman_pcie_gart_enable()
1327 (unsigned long long)rdev->gart.table_addr); in cayman_pcie_gart_enable()
1328 rdev->gart.ready = true; in cayman_pcie_gart_enable()
radeon_vm.c
367 uint64_t src = rdev->gart.table_addr + (addr >> 12) * 8; in radeon_vm_set_pages()
599 result = rdev->gart.pages_entry[addr >> RADEON_GPU_PAGE_SHIFT]; in radeon_vm_map_gart()
r600.c
1082 void __iomem *ptr = (void *)rdev->gart.ptr; in r600_pcie_gart_tlb_flush()
1116 if (rdev->gart.robj) { in r600_pcie_gart_init()
1124 rdev->gart.table_size = rdev->gart.num_gpu_pages * 8; in r600_pcie_gart_init()
1133 if (rdev->gart.robj == NULL) { in r600_pcie_gart_enable()
1170 WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12); in r600_pcie_gart_enable()
1181 (unsigned long long)rdev->gart.table_addr); in r600_pcie_gart_enable()
1182 rdev->gart.ready = true; in r600_pcie_gart_enable()
radeon.h
1859 } gart; member
2362 struct radeon_gart gart; member
2701 #define radeon_gart_tlb_flush(rdev) (rdev)->asic->gart.tlb_flush((rdev))
2702 #define radeon_gart_get_page_entry(a, f) (rdev)->asic->gart.get_page_entry((a), (f))
2703 #define radeon_gart_set_page(rdev, i, e) (rdev)->asic->gart.set_page((rdev), (i), (e))
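
The radeon.h and radeon_asic.c matches show how the per-ASIC GART back-ends are dispatched: the asic structure carries a gart block of function pointers, the radeon_gart_* macros forward to it, and radeon_agp_disable() swaps in either the rv370_pcie_* or the r100_pci_* callbacks. A stripped-down sketch of that indirection follows, with placeholder types; everything except the three callback roles is illustrative.

#include <stdint.h>
#include <stdbool.h>

struct dev;	/* placeholder; the real callbacks take struct radeon_device * */

/* Mirrors the three callbacks dispatched by the radeon_gart_* macros. */
struct gart_funcs {
	void     (*tlb_flush)(struct dev *rdev);
	uint64_t (*get_page_entry)(uint64_t addr, uint32_t flags);
	void     (*set_page)(struct dev *rdev, unsigned int idx, uint64_t entry);
};

struct asic {
	struct gart_funcs gart;
};

/* Rough analogue of the radeon_gart_tlb_flush()/_set_page() macros. */
#define gart_tlb_flush(asic, rdev)       ((asic)->gart.tlb_flush((rdev)))
#define gart_set_page(asic, rdev, i, e)  ((asic)->gart.set_page((rdev), (i), (e)))

/* Rough analogue of radeon_agp_disable(): pick the PCIe or PCI back-end. */
static void select_gart_backend(struct asic *asic, bool has_pcie_gart,
				const struct gart_funcs *pcie,
				const struct gart_funcs *pci)
{
	asic->gart = has_pcie_gart ? *pcie : *pci;
}
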
/linux/drivers/gpu/drm/nouveau/
nouveau_chan.h
26 struct nvif_object gart; member
64 u32 vram, u32 gart, struct nouveau_channel **);
nouveau_chan.c
101 nvif_object_dtor(&chan->gart); in nouveau_channel_del()
357 nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart)
390 /* allocate dma objects to cover all allowed vram, and gart */ in nouveau_channel_init()
428 ret = nvif_object_ctor(&chan->user, "abi16ChanGartCtxDma", gart, in nouveau_channel_init()
430 &chan->gart); in nouveau_channel_init()
490 bool priv, u64 runm, u32 vram, u32 gart, struct nouveau_channel **pchan)
500 ret = nouveau_channel_init(*pchan, vram, gart); in nouveau_channel_new()
359 nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart) nouveau_channel_init() argument
492 nouveau_channel_new(struct nouveau_cli *cli, bool priv, u64 runm, u32 vram, u32 gart, struct nouveau_channel **pchan) nouveau_channel_new() argument
/linux/drivers/gpu/drm/amd/amdgpu/
gmc_v12_0.c
731 if (adev->gart.bo) { in gmc_v12_0_gart_init()
741 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v12_0_gart_init()
742 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_GFX12(0ULL, MTYPE_UC) | in gmc_v12_0_gart_init()
889 if (adev->gart.bo == NULL) { in gmc_v12_0_gart_enable()
911 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v12_0_gart_enable()
gmc_v11_0.c
718 if (adev->gart.bo) { in gmc_v11_0_gart_init()
728 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v11_0_gart_init()
729 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(0ULL, MTYPE_UC) | in gmc_v11_0_gart_init()
887 if (adev->gart.bo == NULL) { in gmc_v11_0_gart_enable()
909 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v11_0_gart_enable()
gmc_v6_0.c
475 if (adev->gart.bo == NULL) { in gmc_v6_0_gart_enable()
481 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v6_0_gart_enable()
566 if (adev->gart.bo) { in gmc_v6_0_gart_init()
573 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v6_0_gart_init()
574 adev->gart.gart_pte_flags = 0; in gmc_v6_0_gart_init()
gmc_v10_0.c
758 if (adev->gart.bo) { in gmc_v10_0_gart_init()
768 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v10_0_gart_init()
769 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(0ULL, MTYPE_UC) | in gmc_v10_0_gart_init()
952 if (adev->gart.bo == NULL) { in gmc_v10_0_gart_enable()
986 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v10_0_gart_enable()
gmc_v7_0.c
616 if (adev->gart.bo == NULL) { in gmc_v7_0_gart_enable()
621 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v7_0_gart_enable()
716 if (adev->gart.bo) { in gmc_v7_0_gart_init()
724 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v7_0_gart_init()
725 adev->gart.gart_pte_flags = 0; in gmc_v7_0_gart_init()
gmc_v8_0.c
831 if (adev->gart.bo == NULL) { in gmc_v8_0_gart_enable()
836 table_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in gmc_v8_0_gart_enable()
948 if (adev->gart.bo) { in gmc_v8_0_gart_init()
956 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v8_0_gart_init()
957 adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE; in gmc_v8_0_gart_init()
gmc_v9_0.c
1828 if (adev->gart.bo) { in gmc_v9_0_gart_init()
1845 adev->gart.table_size = adev->gart.num_gpu_pages * 8; in gmc_v9_0_gart_init()
1846 adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(0ULL, MTYPE_UC) | in gmc_v9_0_gart_init()
2369 if (adev->gart.bo == NULL) { in gmc_v9_0_gart_enable()
2392 (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo)); in gmc_v9_0_gart_enable()
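
One detail that repeats across the gmc_v6_0 through gmc_v12_0 matches above: gart_init() sizes the table as num_gpu_pages * 8, i.e. one 8-byte PTE per GPU page, whereas the older r100/rs400/r300 paths earlier in the listing use 4-byte entries. Here is a small standalone example of that arithmetic, with made-up aperture numbers rather than real hardware values.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t gart_size = 512ULL << 20;           /* example: 512 MiB GART aperture */
	uint64_t gpu_page  = 4096;                   /* assumed GPU page size */
	uint64_t num_gpu_pages = gart_size / gpu_page;
	uint64_t table_size    = num_gpu_pages * 8;  /* one 8-byte PTE per page */

	printf("pages=%llu, table=%llu KiB\n",
	       (unsigned long long)num_gpu_pages,
	       (unsigned long long)(table_size >> 10));
	return 0;
}

With these example numbers, a 512 MiB aperture takes 131072 page-table entries, i.e. a 1 MiB table.
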
mmhub_v3_3.c
150 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in mmhub_v3_3_init_gart_aperture_regs()
368 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in mmhub_v3_3_init_saw_regs()
amdgpu_gmc.c
660 job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo); in amdgpu_gmc_flush_gpu_tlb()
1007 uint64_t flags = adev->gart.gart_pte_flags; //TODO it is UC. explore NC/RW? in amdgpu_gmc_init_pdb0()
1015 u64 gart_ptb_gpu_pa = amdgpu_gmc_vram_pa(adev, adev->gart.bo); in amdgpu_gmc_init_pdb0()
mmhub_v1_0.c
70 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in mmhub_v1_0_init_gart_aperture_regs()
234 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in mmhub_v1_0_init_saw()
gfxhub_v3_0_3.c
138 uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo); in gfxhub_v3_0_3_init_gart_aperture_regs()
/linux/drivers/gpu/drm/amd/amdkfd/
kfd_migrate.c
78 dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo); in svm_migrate_gart_map()
89 pte_flags |= adev->gart.gart_pte_flags; in svm_migrate_gart_map()
