/linux/arch/xtensa/mm/
  cache.c
       62  if (!DCACHE_ALIAS_EQ(page_to_phys(page), vaddr)) {   in kmap_invalidate_coherent()
       71  (page_to_phys(page) & DCACHE_ALIAS_MASK);   in kmap_invalidate_coherent()
       75  page_to_phys(page));   in kmap_invalidate_coherent()
       84  *paddr = page_to_phys(page);   in coherent_kvaddr()
      204  unsigned long phys = page_to_phys(pfn_to_page(pfn));   in local_flush_cache_page()
      277  unsigned long phys = page_to_phys(page);   in copy_to_user_page()
      317  unsigned long phys = page_to_phys(page);   in copy_from_user_page()
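Every cache.c hit above follows the same pattern: take the page's physical address with page_to_phys() and compare its cache colour with that of the virtual address it is mapped at, flushing only when the two alias to different lines of the virtually indexed data cache. A minimal userspace model of that check is sketched below; the page size, way size and alias macros are illustrative stand-ins, not the exact xtensa definitions.

#include <stdio.h>
#include <stdint.h>

/* Illustrative geometry: 4 KiB pages, 16 KiB virtually indexed cache way. */
#define PAGE_SHIFT       12
#define PAGE_SIZE        (1UL << PAGE_SHIFT)
#define DCACHE_WAY_SIZE  (4 * PAGE_SIZE)

/* Address bits that select the cache set but lie above the page offset:
 * two addresses that differ here can hold the same data in different lines. */
#define DCACHE_ALIAS_MASK      (DCACHE_WAY_SIZE - PAGE_SIZE)
#define DCACHE_ALIAS_EQ(a, b)  ((((a) ^ (b)) & DCACHE_ALIAS_MASK) == 0)

static void flush_alias(uint64_t phys, uint64_t vaddr)
{
        /* Same colour: the physical page and the mapping share cache lines,
         * so no extra maintenance is needed.  Different colour: write back
         * and invalidate before the other alias is touched. */
        if (DCACHE_ALIAS_EQ(phys, vaddr))
                printf("0x%llx / 0x%llx: same colour, nothing to do\n",
                       (unsigned long long)phys, (unsigned long long)vaddr);
        else
                printf("0x%llx / 0x%llx: aliasing mapping, flush needed\n",
                       (unsigned long long)phys, (unsigned long long)vaddr);
}

int main(void)
{
        flush_alias(0x40003000, 0x7f001000);   /* colours 3 and 1: flush   */
        flush_alias(0x40003000, 0x7f003000);   /* both colour 3: no flush  */
        return 0;
}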
/linux/drivers/char/agp/
  uninorth-agp.c
      144  page_to_phys(agp_bridge->scratch_page_page) >> 12);   in uninorth_configure()
      187  gp[i] = (page_to_phys(mem->pages[i]) >> PAGE_SHIFT) | 0x80000000UL;   in uninorth_insert_memory()
      189  gp[i] = cpu_to_le32((page_to_phys(mem->pages[i]) & 0xFFFFF000UL) |   in uninorth_insert_memory()
      191  flush_dcache_range((unsigned long)__va(page_to_phys(mem->pages[i])),   in uninorth_insert_memory()
      192  (unsigned long)__va(page_to_phys(mem->pages[i]))+0x1000);   in uninorth_insert_memory()
      433  scratch_value = (page_to_phys(agp_bridge->scratch_page_page) >> PAGE_SHIFT) | 0x80000000UL;   in uninorth_create_gatt_table()
      435  scratch_value = cpu_to_le32((page_to_phys(agp_bridge->scratch_page_page) & 0xFFFFF000UL) |   in uninorth_create_gatt_table()
  ali-agp.c
      155  page_to_phys(page)) | ALI_CACHE_FLUSH_EN ));   in m1541_alloc_page()
      183  page_to_phys(page)) | ALI_CACHE_FLUSH_EN));   in m1541_destroy_page()

/linux/arch/csky/mm/
  dma-mapping.c
       18  void *start = __va(page_to_phys(page));   in cache_op()
       55  cache_op(page_to_phys(page), size, dma_wbinv_set_zero_range);   in arch_dma_prep_coherent()
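The csky dma-mapping.c entry, like the microblaze, m68k, arc and riscv arch_dma_prep_coherent() hits further down, translates the freshly allocated page to a physical address and writes back / invalidates the data cache over that range before the buffer is handed out as DMA-coherent memory. A rough self-contained model of that flow, with the architecture's cache primitive (dma_wbinv_set_zero_range(), cache_push(), dma_cache_wback_inv(), ...) reduced to a stub and a made-up PFN:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define PAGE_SHIFT 12                       /* assumed 4 KiB pages */

struct page { int dummy; };                 /* stand-in for struct page */

static struct page fake_page;               /* pretend allocation result */

/* Illustrative translation: a fixed PFN for the fake page. */
static uint64_t page_to_phys(struct page *page)
{
        (void)page;
        return (uint64_t)0x82000 << PAGE_SHIFT;
}

/* Stub for the architecture's write-back + invalidate primitive. */
static void cache_wback_inv(uint64_t paddr, size_t size)
{
        printf("wb+inv [0x%llx, 0x%llx)\n",
               (unsigned long long)paddr, (unsigned long long)(paddr + size));
}

/* Shape of arch_dma_prep_coherent(): make sure no dirty or stale cache
 * lines cover the buffer before it is used for coherent DMA. */
static void arch_dma_prep_coherent(struct page *page, size_t size)
{
        cache_wback_inv(page_to_phys(page), size);
}

int main(void)
{
        arch_dma_prep_coherent(&fake_page, 4 * (1UL << PAGE_SHIFT));
        return 0;
}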
/linux/include/asm-generic/
  memory_model.h
       77  #define page_to_phys(page) \   macro
       85  #define page_to_phys(page) PFN_PHYS(page_to_pfn(page))   macro
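memory_model.h carries the generic definition: page_to_phys(page) is PFN_PHYS(page_to_pfn(page)), i.e. look up the page frame number of the struct page and shift it by PAGE_SHIFT to get the physical address. A tiny flat-memory-model illustration follows; the mem_map array, its base PFN and the 4 KiB page size are made up for the example.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT 12                      /* assumed 4 KiB pages */

struct page { int flags; };                /* stand-in for the kernel's struct page */

static struct page mem_map[16];            /* flat memory model: one entry per frame */
static const uint64_t base_pfn = 0x80000;  /* PFN of mem_map[0], illustrative */

/* page_to_pfn() in the flat model: index into mem_map plus the base PFN. */
static uint64_t page_to_pfn(struct page *page)
{
        return base_pfn + (uint64_t)(page - mem_map);
}

/* PFN_PHYS(): a frame number becomes a physical address by shifting. */
static uint64_t pfn_phys(uint64_t pfn)
{
        return pfn << PAGE_SHIFT;
}

/* page_to_phys() as in memory_model.h: PFN_PHYS(page_to_pfn(page)). */
static uint64_t page_to_phys(struct page *page)
{
        return pfn_phys(page_to_pfn(page));
}

int main(void)
{
        printf("phys of mem_map[3] = 0x%llx\n",
               (unsigned long long)page_to_phys(&mem_map[3]));
        return 0;
}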
/linux/tools/virtio/linux/
  dma-mapping.h
       24  #define dma_map_page(d, p, o, s, dir) (page_to_phys(p) + (o))
       25  #define dma_map_page_attrs(d, p, o, s, dir, a) (page_to_phys(p) + (o))
  scatterlist.h
      158  return page_to_phys(sg_page(sg)) + sg->offset;   in sg_phys()
  kernel.h
       47  #define page_to_phys(p) ((dma_addr_t)(unsigned long)(p))   macro
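In the userspace virtio test harness these three headers stub out the whole translation chain: kernel.h turns page_to_phys() into a plain cast of the "page" pointer, dma-mapping.h collapses dma_map_page() to that address plus the offset, and scatterlist.h keeps the usual sg_phys() = page_to_phys(sg_page(sg)) + sg->offset. A compressed sketch of how the pieces compose, with struct fields trimmed and sg_page() replaced by direct field access:

#include <stdio.h>
#include <stdint.h>

typedef uint64_t dma_addr_t;

/* In the harness a "page" is just a host pointer, so translating it to a
 * "physical" / DMA address is a plain cast (cf. kernel.h line 47). */
#define page_to_phys(p)  ((dma_addr_t)(unsigned long)(p))

/* dma_map_page() degenerates to phys + offset: no IOMMU, no bounce buffer
 * (cf. dma-mapping.h lines 24-25). */
#define dma_map_page(d, p, o, s, dir)  (page_to_phys(p) + (o))

/* Trimmed scatterlist element: backing "page" plus offset and length. */
struct scatterlist {
        void            *page;
        unsigned int     offset;
        unsigned int     length;
};

/* sg_phys() as in scatterlist.h line 158, with sg_page() folded away. */
static dma_addr_t sg_phys(struct scatterlist *sg)
{
        return page_to_phys(sg->page) + sg->offset;
}

int main(void)
{
        static char buf[4096];
        struct scatterlist sg = { .page = buf, .offset = 128, .length = 512 };

        printf("dma_map_page -> 0x%llx, sg_phys -> 0x%llx\n",
               (unsigned long long)dma_map_page(NULL, buf, 128, 512, 0),
               (unsigned long long)sg_phys(&sg));
        return 0;
}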
/linux/kernel/dma/
  direct.c
      111  if (page && !dma_coherent_ok(dev, page_to_phys(page), size)) {   in dma_direct_alloc_swiotlb()
      134  if (dma_coherent_ok(dev, page_to_phys(page), size) &&   in __dma_direct_alloc_pages()
      142  && !dma_coherent_ok(dev, page_to_phys(page), size)) {   in __dma_direct_alloc_pages()
      181  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page));   in dma_direct_alloc_from_pool()
      199  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page));   in dma_direct_alloc_no_mapping()
      297  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page));   in dma_direct_alloc()
      370  *dma_handle = phys_to_dma_direct(dev, page_to_phys(page));   in dma_direct_alloc_pages()
  ops_helpers.c
       75  phys = page_to_phys(page);   in dma_common_alloc_pages()
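The direct.c and ops_helpers.c hits show the core pattern of the direct-mapped DMA path: allocate pages, take page_to_phys() of the result, ask dma_coherent_ok() whether that physical range is reachable by the device, and if so publish phys_to_dma_direct() of it as the dma_handle. The sketch below models the decision with a bare mask check and an identity physical-to-bus mapping; the real code additionally retries lower zones and falls back to swiotlb or the atomic pools.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

typedef uint64_t phys_addr_t;
typedef uint64_t dma_addr_t;

struct device {
        uint64_t coherent_dma_mask;     /* highest bus address the device can reach */
};

/* Model of dma_coherent_ok(): the whole buffer must sit below the mask.
 * (The real helper also honours per-bus DMA offsets and limits.) */
static int dma_coherent_ok(struct device *dev, phys_addr_t phys, size_t size)
{
        return phys + size - 1 <= dev->coherent_dma_mask;
}

/* Model of phys_to_dma_direct(): identity mapping, no dma_range_map offset. */
static dma_addr_t phys_to_dma_direct(struct device *dev, phys_addr_t phys)
{
        (void)dev;
        return phys;
}

/* Shape of the allocation path: translate the page, check reachability,
 * then hand the bus address back through *dma_handle. */
static int alloc_coherent_like(struct device *dev, phys_addr_t page_phys,
                               size_t size, dma_addr_t *dma_handle)
{
        if (!dma_coherent_ok(dev, page_phys, size))
                return -1;              /* real code would retry lower zones */
        *dma_handle = phys_to_dma_direct(dev, page_phys);
        return 0;
}

int main(void)
{
        struct device dev = { .coherent_dma_mask = 0xffffffffULL };  /* 32-bit device */
        dma_addr_t handle;

        if (alloc_coherent_like(&dev, 0x1f0000000ULL, 0x1000, &handle))
                printf("buffer above 4G: not usable without retry/bounce\n");
        if (!alloc_coherent_like(&dev, 0x3c000000ULL, 0x1000, &handle))
                printf("dma_handle = 0x%llx\n", (unsigned long long)handle);
        return 0;
}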
/linux/arch/loongarch/include/asm/
  page.h
       81  #define page_to_virt(page) __va(page_to_phys(page))
       91  (__kfence_pool == NULL) ? __va(page_to_phys(page)) : page_address(page); \
  io.h
       74  page_to_phys(tlb_virt_to_page((unsigned long)kaddr)) + offset_in_page((unsigned long)kaddr);\
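The loongarch page.h entry shows the other common composition: page_to_virt(page) is __va(page_to_phys(page)), a physical address turned back into its linear-map virtual address (per line 91, the plain __va() form is used when no KFENCE pool exists, otherwise page_address() is taken). With a constant offset between the two address spaces that is just an addition, as in the toy model below; the direct-map base and the PFN are illustrative.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT   12
/* Illustrative direct-map base; the real value is the architecture's PAGE_OFFSET. */
#define PAGE_OFFSET  0xffff800000000000ULL

/* __va()/__pa() for a simple linear map: constant offset both ways. */
static uint64_t va_of(uint64_t phys)  { return phys + PAGE_OFFSET; }
static uint64_t pa_of(uint64_t virt)  { return virt - PAGE_OFFSET; }

int main(void)
{
        uint64_t pfn  = 0x12345;                 /* made-up page frame number  */
        uint64_t phys = pfn << PAGE_SHIFT;       /* page_to_phys()             */
        uint64_t virt = va_of(phys);             /* __va(page_to_phys(page))   */

        printf("pfn 0x%llx -> phys 0x%llx -> virt 0x%llx -> back 0x%llx\n",
               (unsigned long long)pfn, (unsigned long long)phys,
               (unsigned long long)virt, (unsigned long long)pa_of(virt));
        return 0;
}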
/linux/arch/microblaze/mm/
  consistent.c
       20  phys_addr_t paddr = page_to_phys(page);   in arch_dma_prep_coherent()

/linux/drivers/firmware/efi/
  capsule.c
      172  sglist_phys = page_to_phys(sg_pages[0]);   in efi_capsule_update_locked()
      267  sglist[j].data = page_to_phys(sg_pages[i + 1]);   in efi_capsule_update()

/linux/arch/m68k/kernel/
  dma.c
       14  cache_push(page_to_phys(page), size);   in arch_dma_prep_coherent()

/linux/arch/arc/mm/
  dma.c
       30  dma_cache_wback_inv(page_to_phys(page), size);   in arch_dma_prep_coherent()

/linux/drivers/virt/
  fsl_hypervisor.c
      263  sg_list[0].source = page_to_phys(pages[0]) + lb_offset;   in ioctl_memcpy()
      267  sg_list[0].target = page_to_phys(pages[0]) + lb_offset;   in ioctl_memcpy()
      277  sg_list[i].source = page_to_phys(pages[i]);   in ioctl_memcpy()
      282  sg_list[i].target = page_to_phys(pages[i]);   in ioctl_memcpy()
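In ioctl_memcpy() the pinned user pages become a scatter/gather list for the hypervisor: the first entry starts at page_to_phys(pages[0]) plus the offset of the local buffer within that first page, and each following entry is simply the physical address of the next whole page. A stripped-down model of that loop is below; the sg_entry layout and the page addresses are invented, and the real code also fills in the remote side and picks source or target depending on the copy direction.

#include <stdio.h>
#include <stdint.h>

#define PAGE_SHIFT  12
#define PAGE_SIZE   (1UL << PAGE_SHIFT)

/* Simplified descriptor: where the local data is and how much of it. */
struct sg_entry {
        uint64_t addr;
        uint64_t size;
};

/* Build an sg list from an array of page physical addresses covering a
 * buffer that starts lb_offset bytes into the first page. */
static unsigned int build_sg(const uint64_t *page_phys, unsigned int num_pages,
                             uint64_t lb_offset, uint64_t count,
                             struct sg_entry *sg)
{
        unsigned int i;

        sg[0].addr = page_phys[0] + lb_offset;            /* page_to_phys(pages[0]) + lb_offset */
        sg[0].size = (count < PAGE_SIZE - lb_offset) ? count
                                                     : PAGE_SIZE - lb_offset;
        count -= sg[0].size;

        for (i = 1; i < num_pages && count; i++) {
                sg[i].addr = page_phys[i];                /* page_to_phys(pages[i]) */
                sg[i].size = (count < PAGE_SIZE) ? count : PAGE_SIZE;
                count -= sg[i].size;
        }
        return i;
}

int main(void)
{
        uint64_t pages[3] = { 0x80001000, 0x8abcd000, 0x80333000 };  /* made up */
        struct sg_entry sg[3];
        unsigned int n = build_sg(pages, 3, 0x300, 0x2100, sg);

        for (unsigned int i = 0; i < n; i++)
                printf("sg[%u]: 0x%llx + 0x%llx\n", i,
                       (unsigned long long)sg[i].addr,
                       (unsigned long long)sg[i].size);
        return 0;
}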
/linux/arch/openrisc/kernel/
  patching.c
       30  phys = page_to_phys(page) + offset_in_page(addr);   in patch_map()

/linux/arch/xtensa/include/asm/
  highmem.h
       34  return DCACHE_ALIAS(page_to_phys(page));   in get_pkmap_color()

/linux/arch/m68k/mm/
  cache.c
      117  : : "a" (page_to_phys(page)));   in flush_icache_user_page()

/linux/arch/parisc/kernel/
  kexec.c
       80  unsigned long phys = page_to_phys(image->control_code_page);   in machine_kexec()

/linux/arch/arm/include/asm/
  pgalloc.h
      144  __pmd_populate(pmdp, page_to_phys(ptep), prot);   in pmd_populate()
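The pgalloc.h hit is a reminder of why page_to_phys() exists at all: hardware page-table entries must hold physical addresses, so populating a pmd means taking the physical address of the freshly allocated pte page and or-ing in the type/protection bits. A toy encoding follows; the bit layout is invented for illustration and is not the real ARM descriptor format.

#include <stdio.h>
#include <stdint.h>

typedef uint64_t pmdval_t;

/* Invented "table descriptor" bit, standing in for the real prot value. */
#define PMD_TYPE_TABLE  0x3ULL

/* Shape of __pmd_populate(): a table entry is the physical address of the
 * next-level table combined with type/protection bits in the low bits. */
static pmdval_t make_pmd(uint64_t pte_table_phys, pmdval_t prot)
{
        return pte_table_phys | prot;
}

int main(void)
{
        uint64_t pte_phys = 0x8f2d4000ULL;       /* page_to_phys(ptep), made up */
        pmdval_t pmd = make_pmd(pte_phys, PMD_TYPE_TABLE);

        printf("pmd entry = 0x%llx\n", (unsigned long long)pmd);
        return 0;
}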
/linux/arch/riscv/mm/
  dma-noncoherent.c
      123  noncoherent_cache_ops.wback_inv(page_to_phys(page), size);   in arch_dma_prep_coherent()

/linux/scripts/gdb/linux/
  mm.py
      263  def page_to_phys(self, page):   member in aarch64_page_ops
      271  return self.__va(self.page_to_phys(page))
      344  phys_addr = page_ops().ops.page_to_phys(page)

/linux/tools/testing/scatterlist/linux/
  mm.h
       45  static inline unsigned long page_to_phys(struct page *page)   in page_to_phys() function