
Searched refs: page_to_phys (results 1 – 25 of 99), sorted by relevance


/linux/drivers/char/agp/
  uninorth-agp.c
    144  page_to_phys(agp_bridge->scratch_page_page) >> 12); in uninorth_configure()
    187  gp[i] = (page_to_phys(mem->pages[i]) >> PAGE_SHIFT) | 0x80000000UL; in uninorth_insert_memory()
    189  gp[i] = cpu_to_le32((page_to_phys(mem->pages[i]) & 0xFFFFF000UL) | in uninorth_insert_memory()
    191  flush_dcache_range((unsigned long)__va(page_to_phys(mem->pages[i])), in uninorth_insert_memory()
    192  (unsigned long)__va(page_to_phys(mem->pages[i]))+0x1000); in uninorth_insert_memory()
    433  scratch_value = (page_to_phys(agp_bridge->scratch_page_page) >> PAGE_SHIFT) | 0x80000000UL; in uninorth_create_gatt_table()
    435  scratch_value = cpu_to_le32((page_to_phys(agp_bridge->scratch_page_page) & 0xFFFFF000UL) | in uninorth_create_gatt_table()
  ali-agp.c
    155  page_to_phys(page)) | ALI_CACHE_FLUSH_EN )); in m1541_alloc_page()
    183  page_to_phys(page)) | ALI_CACHE_FLUSH_EN)); in m1541_destroy_page()
/linux/arch/csky/mm/
  dma-mapping.c
    18   void *start = __va(page_to_phys(page)); in cache_op()
    55   cache_op(page_to_phys(page), size, dma_wbinv_set_zero_range); in arch_dma_prep_coherent()
/linux/include/asm-generic/
  memory_model.h
    77   #define page_to_phys(page) \ (macro)
    85   #define page_to_phys(page) PFN_PHYS(page_to_pfn(page)) (macro)
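
Note: the two asm-generic hits above are the generic definition most architectures fall back to: page_to_phys() is the page's PFN shifted up by PAGE_SHIFT. A minimal standalone sketch of that arithmetic (the struct page lookup is simulated; only the shift mirrors the kernel macro):

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                     /* 4 KiB pages, the common case */
    typedef uint64_t phys_addr_t;

    /* PFN_PHYS(pfn) is pfn << PAGE_SHIFT, which is what the asm-generic
     * page_to_phys() expands to after page_to_pfn(). */
    static phys_addr_t pfn_phys(uint64_t pfn)
    {
            return (phys_addr_t)pfn << PAGE_SHIFT;
    }

    int main(void)
    {
            uint64_t pfn = 0x12345;           /* pretend page_to_pfn() returned this */
            printf("phys = %#llx\n", (unsigned long long)pfn_phys(pfn));
            return 0;                         /* prints phys = 0x12345000 */
    }
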
/linux/kernel/dma/
  direct.c
    111  if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) { in dma_direct_alloc_swiotlb()
    134  if (!dma_coherent_ok(dev, page_to_phys(page), size) || in __dma_direct_alloc_pages()
    143  if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) { in __dma_direct_alloc_pages()
    186  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page)); in dma_direct_alloc_from_pool()
    204  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page)); in dma_direct_alloc_no_mapping()
    302  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page)); in dma_direct_alloc()
    375  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page)); in dma_direct_alloc_pages()
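
The kernel/dma/direct.c hits above all follow one idiom: convert the freshly allocated page to a physical address with page_to_phys(), check that the device can reach it with dma_coherent_ok(), and publish the translated bus address via *dma_handle. A rough standalone model of that reachability check, assuming an identity phys-to-DMA translation (real platforms may apply offsets or bounce through SWIOTLB):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t phys_addr_t;
    typedef uint64_t dma_addr_t;

    /* Stand-in for phys_to_dma_direct(): identity mapping assumed here. */
    static dma_addr_t phys_to_dma(phys_addr_t phys)
    {
            return (dma_addr_t)phys;
    }

    /* Rough model of dma_coherent_ok(): the whole buffer, once translated
     * to a bus address, must end below the device's DMA mask. */
    static bool coherent_ok(uint64_t dma_mask, phys_addr_t phys, size_t size)
    {
            return phys_to_dma(phys) + size - 1 <= dma_mask;
    }

    int main(void)
    {
            uint64_t mask32 = 0xffffffffULL;         /* a 32-bit capable device */
            phys_addr_t page_phys = 0x100000000ULL;  /* page just above 4 GiB */

            printf("ok: %d\n", coherent_ok(mask32, page_phys, 4096)); /* prints ok: 0 */
            return 0;
    }
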
/linux/arch/loongarch/include/asm/
  page.h
    81   #define page_to_virt(page) __va(page_to_phys(page))
    91   (__kfence_pool == NULL) ? __va(page_to_phys(page)) : page_address(page); \
/linux/arch/x86/virt/vmx/tdx/
  tdx.c
    1502  return page_to_phys(td->tdr_page); in tdx_tdr_pa()
    1507  return page_to_phys(td->tdvpr_page); in tdx_tdvpr_pa()
    1532  .rcx = page_to_phys(tdcs_page), in tdh_mng_addcx()
    1546  .r8 = page_to_phys(page), in tdh_mem_page_add()
    1547  .r9 = page_to_phys(source), in tdh_mem_page_add()
    1566  .r8 = page_to_phys(page), in tdh_mem_sept_add()
    1583  .rcx = page_to_phys(tdcx_page), in tdh_vp_addcx()
    1597  .r8 = page_to_phys(page), in tdh_mem_page_aug()
    1803  .rcx = page_to_phys(page), in tdh_phymem_page_reclaim()
/linux/drivers/firmware/efi/
  capsule.c
    172  sglist_phys = page_to_phys(sg_pages[0]); in efi_capsule_update_locked()
    267  sglist[j].data = page_to_phys(sg_pages[i + 1]); in efi_capsule_update()
/linux/arch/microblaze/mm/
  consistent.c
    20   phys_addr_t paddr = page_to_phys(page); in arch_dma_prep_coherent()
/linux/arch/m68k/kernel/
  dma.c
    14   cache_push(page_to_phys(page), size); in arch_dma_prep_coherent()
/linux/drivers/virt/
  fsl_hypervisor.c
    263  sg_list[0].source = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
    267  sg_list[0].target = page_to_phys(pages[0]) + lb_offset; in ioctl_memcpy()
    277  sg_list[i].source = page_to_phys(pages[i]); in ioctl_memcpy()
    282  sg_list[i].target = page_to_phys(pages[i]); in ioctl_memcpy()
/linux/arch/arc/mm/
  dma.c
    30   dma_cache_wback_inv(page_to_phys(page), size); in arch_dma_prep_coherent()
/linux/arch/xtensa/include/asm/
  highmem.h
    34   return DCACHE_ALIAS(page_to_phys(page)); in get_pkmap_color()
/linux/arch/m68k/mm/
  cache.c
    117  : : "a" (page_to_phys(page))); in flush_icache_user_page()
/linux/tools/virtio/linux/
  dma-mapping.h
    24   #define dma_map_page(d, p, o, s, dir) (page_to_phys(p) + (o))
  scatterlist.h
    158  return page_to_phys(sg_page(sg)) + sg->offset; in sg_phys()
  kernel.h
    46   #define page_to_phys(p) ((dma_addr_t)(unsigned long)(p)) (macro)
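
The tools/virtio stubs above show the userspace-test convention: outside the kernel there is no real memory map, so page_to_phys() degenerates to a cast of the pointer. A small sketch of the same trick for a host-side harness (the names here are illustrative, not kernel API):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    typedef uint64_t dma_addr_t;
    struct page;                    /* never dereferenced by the harness */

    /* Userspace stub in the spirit of tools/virtio/linux/kernel.h:
     * the "physical address" is simply the host pointer value. */
    #define page_to_phys(p) ((dma_addr_t)(unsigned long)(p))

    int main(void)
    {
            void *buf = malloc(4096);
            struct page *fake_page = (struct page *)buf;

            /* In a host test, DMA addresses are just pointers in disguise. */
            printf("fake phys = %#llx\n",
                   (unsigned long long)page_to_phys(fake_page));
            free(buf);
            return 0;
    }
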
/linux/sound/virtio/
  virtio_pcm_msg.c
    56   phys_addr_t pg_address = page_to_phys(pg); in virtsnd_pcm_sg_num()
    104  sg_phys(&sgs[idx]) + sgs[idx].length != page_to_phys(pg)) { in virtsnd_pcm_sg_from()
/linux/arch/parisc/kernel/
  kexec.c
    80   unsigned long phys = page_to_phys(image->control_code_page); in machine_kexec()
/linux/arch/arm/include/asm/
  pgalloc.h
    144  __pmd_populate(pmdp, page_to_phys(ptep), prot); in pmd_populate()
/linux/scripts/gdb/linux/
  mm.py
    263  def page_to_phys(self, page): (member of aarch64_page_ops)
    271  return self.__va(self.page_to_phys(page))
    344  phys_addr = page_ops().ops.page_to_phys(page)
/linux/arch/riscv/mm/
  dma-noncoherent.c
    123  noncoherent_cache_ops.wback_inv(page_to_phys(page), size); in arch_dma_prep_coherent()
/linux/arch/arm/mm/
  dma-mapping.c
    225   page_to_phys(page), in atomic_pool_init()
    589   *handle = phys_to_dma(dev, page_to_phys(page)); in __dma_alloc()
    686   paddr = page_to_phys(page) + off; in __dma_page_cpu_to_dev()
    698   phys_addr_t paddr = page_to_phys(page) + off; in __dma_page_dev_to_cpu()
    979   phys_addr_t phys = page_to_phys(pages[i]); in __iommu_create_mapping()
    1204  phys_addr_t phys = page_to_phys(sg_page(s)); in __map_sg_chunk()
    1385  ret = iommu_map(mapping->domain, dma_addr, page_to_phys(page), len, in arm_iommu_map_page()
/linux/tools/testing/scatterlist/linux/
  mm.h
    45   static inline unsigned long page_to_phys(struct page *page) in page_to_phys() (function)
/linux/arch/riscv/kernel/
  sbi.c
    600  base_addr = page_to_phys(vmalloc_to_page(bytes)) + in sbi_debug_console_write()
    629  base_addr = page_to_phys(vmalloc_to_page(bytes)) + in sbi_debug_console_read()
