
Searched refs:pfn (Results 1 – 25 of 463) sorted by relevance


/linux/include/linux/
pfn_t.h 29 static inline pfn_t __pfn_to_pfn_t(unsigned long pfn, u64 flags) in __pfn_to_pfn_t() argument
31 pfn_t pfn_t = { .val = pfn | (flags & PFN_FLAGS_MASK), }; in __pfn_to_pfn_t()
37 static inline pfn_t pfn_to_pfn_t(unsigned long pfn) in pfn_to_pfn_t() argument
39 return __pfn_to_pfn_t(pfn, 0); in pfn_to_pfn_t()
47 static inline bool pfn_t_has_page(pfn_t pfn) in pfn_t_has_page() argument
49 return (pfn.val & PFN_MAP) == PFN_MAP || (pfn.val & PFN_DEV) == 0; in pfn_t_has_page()
52 static inline unsigned long pfn_t_to_pfn(pfn_t pfn) in pfn_t_to_pfn() argument
54 return pfn.val & ~PFN_FLAGS_MASK; in pfn_t_to_pfn()
57 static inline struct page *pfn_t_to_page(pfn_t pfn) in pfn_t_to_page() argument
59 if (pfn_t_has_page(pfn)) in pfn_t_to_page()
[all …]
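The pfn_t helpers above pack flag bits into the otherwise-unused high bits of a page frame number and mask them back out on extraction. Below is a minimal userspace sketch of that encode/decode pattern; the demo_* names and flag bit positions are invented for illustration and are not the kernel's actual PFN_DEV/PFN_MAP/PFN_FLAGS_MASK values.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative flag bits in the top of a 64-bit value; the real constants
 * live in include/linux/pfn_t.h and may differ. */
#define DEMO_PFN_DEV        (1ULL << 62)
#define DEMO_PFN_MAP        (1ULL << 61)
#define DEMO_PFN_FLAGS_MASK (0xfULL << 60)

typedef struct { uint64_t val; } demo_pfn_t;

static demo_pfn_t demo_pfn_to_pfn_t(unsigned long pfn, uint64_t flags)
{
	demo_pfn_t p = { .val = pfn | (flags & DEMO_PFN_FLAGS_MASK) };
	return p;
}

static unsigned long demo_pfn_t_to_pfn(demo_pfn_t p)
{
	return p.val & ~DEMO_PFN_FLAGS_MASK;	/* strip the flag bits */
}

static bool demo_pfn_t_has_page(demo_pfn_t p)
{
	/* device memory without a struct page has DEV set but not MAP */
	return (p.val & DEMO_PFN_MAP) == DEMO_PFN_MAP ||
	       (p.val & DEMO_PFN_DEV) == 0;
}

int main(void)
{
	demo_pfn_t p = demo_pfn_to_pfn_t(0x12345, DEMO_PFN_DEV | DEMO_PFN_MAP);

	printf("pfn=%lx has_page=%d\n", demo_pfn_t_to_pfn(p), demo_pfn_t_has_page(p));
	return 0;
}
```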
pageblock-flags.h 60 #define pageblock_align(pfn) ALIGN((pfn), pageblock_nr_pages) argument
61 #define pageblock_aligned(pfn) IS_ALIGNED((pfn), pageblock_nr_pages) argument
62 #define pageblock_start_pfn(pfn) ALIGN_DOWN((pfn), pageblock_nr_pages) argument
63 #define pageblock_end_pfn(pfn) ALIGN((pfn) + 1, pageblock_nr_pages) argument
69 unsigned long pfn,
74 unsigned long pfn,
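The pageblock_* macros above are plain power-of-two alignment on PFNs. A small sketch of the same arithmetic, assuming pageblock_nr_pages is 512 (typical for 4 KiB pages with 2 MiB pageblocks; the real value is configuration-dependent):

```c
#include <stdio.h>

#define DEMO_PAGEBLOCK_NR_PAGES 512UL	/* assumption: 2 MiB pageblocks, 4 KiB pages */

#define DEMO_ALIGN(x, a)      (((x) + (a) - 1) & ~((a) - 1))
#define DEMO_ALIGN_DOWN(x, a) ((x) & ~((a) - 1))

#define demo_pageblock_start_pfn(pfn) DEMO_ALIGN_DOWN((pfn), DEMO_PAGEBLOCK_NR_PAGES)
#define demo_pageblock_end_pfn(pfn)   DEMO_ALIGN((pfn) + 1, DEMO_PAGEBLOCK_NR_PAGES)

int main(void)
{
	unsigned long pfn = 1000;

	/* pfn 1000 falls in the pageblock [512, 1024) */
	printf("start=%lu end=%lu\n",
	       demo_pageblock_start_pfn(pfn), demo_pageblock_end_pfn(pfn));
	return 0;
}
```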
/linux/arch/x86/xen/
p2m.c 59 * PFN is that we could find ourselves where pfn_to_mfn(pfn)==pfn for a
60 * non-identity pfn. To protect ourselves against we elect to set (and get) the
127 static inline unsigned p2m_top_index(unsigned long pfn) in p2m_top_index() argument
129 BUG_ON(pfn >= MAX_P2M_PFN); in p2m_top_index()
130 return pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE); in p2m_top_index()
133 static inline unsigned p2m_mid_index(unsigned long pfn) in p2m_mid_index() argument
135 return (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE; in p2m_mid_index()
170 static void p2m_init_identity(unsigned long *p2m, unsigned long pfn) in p2m_init_identity() argument
175 p2m[i] = IDENTITY_FRAME(pfn); in p2m_init_identity()
215 unsigned long pfn, mfn; xen_build_mfn_list_list() local
293 unsigned long pfn; xen_build_dynamic_phys_to_machine() local
309 xen_p2m_elem_type(unsigned long pfn) xen_p2m_elem_type() argument
330 unsigned long pfn; xen_rebuild_p2m_list() local
431 get_phys_to_machine(unsigned long pfn) get_phys_to_machine() argument
527 xen_alloc_p2m_entry(unsigned long pfn) xen_alloc_p2m_entry() argument
628 unsigned long pfn; set_phys_range_identity() local
645 __set_phys_to_machine(unsigned long pfn, unsigned long mfn) __set_phys_to_machine() argument
673 set_phys_to_machine(unsigned long pfn, unsigned long mfn) set_phys_to_machine() argument
706 unsigned long mfn, pfn; set_foreign_p2m_mapping() local
783 unsigned long pfn = page_to_pfn(pages[i]); clear_foreign_p2m_mapping() local
816 unsigned long pfn, mfn, end_pfn; xen_do_remap_nonram() local
902 unsigned long pfn, first_pfn; p2m_dump_show() local
[all...]
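p2m_top_index() and p2m_mid_index() above split a PFN into indices for a three-level physical-to-machine lookup tree. A hedged sketch of that index arithmetic, assuming 512 entries per tree page (4 KiB pages, 8-byte entries); the demo_p2m_index() leaf helper is added here for completeness and is an assumption about the third level:

```c
#include <stdio.h>

/* assumption: 4 KiB pages, 8-byte entries -> 512 entries per tree page */
#define DEMO_P2M_PER_PAGE     512UL
#define DEMO_P2M_MID_PER_PAGE 512UL

static unsigned long demo_p2m_top_index(unsigned long pfn)
{
	return pfn / (DEMO_P2M_MID_PER_PAGE * DEMO_P2M_PER_PAGE);
}

static unsigned long demo_p2m_mid_index(unsigned long pfn)
{
	return (pfn / DEMO_P2M_PER_PAGE) % DEMO_P2M_MID_PER_PAGE;
}

static unsigned long demo_p2m_index(unsigned long pfn)
{
	return pfn % DEMO_P2M_PER_PAGE;		/* offset within the leaf page */
}

int main(void)
{
	unsigned long pfn = 0x123456;

	printf("top=%lu mid=%lu leaf=%lu\n",
	       demo_p2m_top_index(pfn), demo_p2m_mid_index(pfn), demo_p2m_index(pfn));
	return 0;
}
```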
/linux/mm/
page_isolation.c 38 unsigned long pfn; in has_unmovable_pages() local
55 for (pfn = start_pfn; pfn < end_pfn; pfn++) { in has_unmovable_pages()
56 page = pfn_to_page(pfn); in has_unmovable_pages()
93 pfn += skip_pages - 1; in has_unmovable_pages()
105 pfn += (1 << buddy_order(page)) - 1; in has_unmovable_pages()
269 __first_valid_page(unsigned long pfn, unsigned long nr_pages) in __first_valid_page() argument
276 page = pfn_to_online_page(pfn + i); in __first_valid_page()
312 unsigned long pfn; in isolate_single_pageblock() local
365 for (pfn = start_pfn; pfn < boundary_pfn;) { in isolate_single_pageblock()
366 struct page *page = __first_valid_page(pfn, boundary_pfn - pfn); in isolate_single_pageblock()
[all …]
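The pfn loop in has_unmovable_pages() above does not step one page at a time: when it lands on a free buddy chunk it advances by the whole chunk, and the `- 1` offsets the loop's own `pfn++`. A toy sketch of that skip pattern over a fabricated order array (the orders are invented, not read from real struct pages):

```c
#include <stdio.h>

int main(void)
{
	/* pretend each slot records the buddy order of a free chunk starting there */
	unsigned int order_at[16] = { 2, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0 };
	unsigned long pfn;

	for (pfn = 0; pfn < 16; pfn++) {
		unsigned int order = order_at[pfn];

		printf("visit pfn %lu (order %u chunk)\n", pfn, order);
		/* skip the rest of the chunk; -1 because the loop increments pfn */
		pfn += (1UL << order) - 1;
	}
	return 0;
}
```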
page_ext.c 170 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
184 index = pfn - round_down(node_start_pfn(page_to_nid(page)), in lookup_page_ext()
250 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
251 struct mem_section *section = __pfn_to_section(pfn); in lookup_page_ext()
263 return get_entry(page_ext, pfn); in lookup_page_ext()
283 static int __meminit init_section_page_ext(unsigned long pfn, int nid) in init_section_page_ext() argument
289 section = __pfn_to_section(pfn); in init_section_page_ext()
313 pfn &= PAGE_SECTION_MASK; in init_section_page_ext()
314 section->page_ext = (void *)base - page_ext_size * pfn; in init_section_page_ext()
337 static void __free_page_ext(unsigned long pfn) in __free_page_ext() argument
[all …]
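init_section_page_ext() above stores a base pointer biased downward by page_ext_size times the section's first PFN, so the later lookup in get_entry() can index with the absolute PFN instead of a section-relative one. A userspace sketch of that pointer-biasing trick with made-up sizes (uintptr_t keeps the intermediate arithmetic well defined):

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_page_ext { unsigned long flags; };

#define DEMO_PAGES_PER_SECTION 64UL	/* assumption: tiny section just for the demo */
#define DEMO_PAGE_EXT_SIZE     sizeof(struct demo_page_ext)

int main(void)
{
	unsigned long section_start_pfn = 128;	/* first pfn of the section */
	unsigned long pfn = 130;		/* a pfn inside that section */
	void *base = calloc(DEMO_PAGES_PER_SECTION, DEMO_PAGE_EXT_SIZE);
	uintptr_t biased;
	struct demo_page_ext *ext;

	if (!base)
		return 1;

	/* store a base biased by the section's first pfn ... */
	biased = (uintptr_t)base - DEMO_PAGE_EXT_SIZE * section_start_pfn;

	/* ... so a later lookup can index with the absolute pfn */
	ext = (struct demo_page_ext *)(biased + DEMO_PAGE_EXT_SIZE * pfn);
	ext->flags = 0x1;

	printf("slot %lu flags=%#lx\n", pfn - section_start_pfn, ext->flags);
	free(base);
	return 0;
}
```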
memory_hotplug.c 318 static int check_pfn_span(unsigned long pfn, unsigned long nr_pages) in check_pfn_span() argument
335 if (!IS_ALIGNED(pfn | nr_pages, min_align)) in check_pfn_span()
345 struct page *pfn_to_online_page(unsigned long pfn) in pfn_to_online_page() argument
347 unsigned long nr = pfn_to_section_nr(pfn); in pfn_to_online_page()
362 if (IS_ENABLED(CONFIG_HAVE_ARCH_PFN_VALID) && !pfn_valid(pfn)) in pfn_to_online_page()
365 if (!pfn_section_valid(ms, pfn)) in pfn_to_online_page()
369 return pfn_to_page(pfn); in pfn_to_online_page()
377 pgmap = get_dev_pagemap(pfn, NULL); in pfn_to_online_page()
384 return pfn_to_page(pfn); in pfn_to_online_page()
388 int __add_pages(int nid, unsigned long pfn, unsigned long nr_pages, in __add_pages() argument
[all …]
memory-failure.c 79 void num_poisoned_pages_inc(unsigned long pfn) in num_poisoned_pages_inc() argument
82 memblk_nr_poison_inc(pfn); in num_poisoned_pages_inc()
85 void num_poisoned_pages_sub(unsigned long pfn, long i) in num_poisoned_pages_sub() argument
88 if (pfn != -1UL) in num_poisoned_pages_sub()
89 memblk_nr_poison_sub(pfn, i); in num_poisoned_pages_sub()
350 static int kill_proc(struct to_kill *tk, unsigned long pfn, int flags) in kill_proc() argument
357 pfn, t->comm, task_pid_nr(t)); in kill_proc()
527 unsigned long pfn, int flags) in kill_procs() argument
535 pfn, tk->tsk->comm, task_pid_nr(tk->tsk)); in kill_procs()
546 else if (kill_proc(tk, pfn, flags) < 0) in kill_procs()
[all …]
page_idle.c 32 * This function tries to get a user memory page by pfn as described above.
34 static struct folio *page_idle_get_folio(unsigned long pfn) in page_idle_get_folio() argument
36 struct page *page = pfn_to_online_page(pfn); in page_idle_get_folio()
120 unsigned long pfn, end_pfn; in page_idle_bitmap_read() local
126 pfn = pos * BITS_PER_BYTE; in page_idle_bitmap_read()
127 if (pfn >= max_pfn) in page_idle_bitmap_read()
130 end_pfn = pfn + count * BITS_PER_BYTE; in page_idle_bitmap_read()
134 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_read()
135 bit = pfn in page_idle_bitmap_read()
165 unsigned long pfn, end_pfn; page_idle_bitmap_write() local
[all...]
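page_idle_bitmap_read() above maps a byte offset and length in the idle-page bitmap file to a PFN range, one bit per page frame. A short sketch of that offset arithmetic; max_pfn, pos, and count are stand-in values:

```c
#include <stdio.h>

#define BITS_PER_BYTE 8UL

int main(void)
{
	unsigned long max_pfn = 1UL << 20;	/* stand-in for the system's max_pfn */
	unsigned long pos = 4096;		/* byte offset into the bitmap file */
	unsigned long count = 8;		/* bytes requested */
	unsigned long pfn, end_pfn;

	pfn = pos * BITS_PER_BYTE;		/* first pfn covered by this read */
	if (pfn >= max_pfn)
		return 0;

	end_pfn = pfn + count * BITS_PER_BYTE;	/* one bit per page frame */
	if (end_pfn > max_pfn)
		end_pfn = max_pfn;

	printf("read covers pfns [%lu, %lu)\n", pfn, end_pfn);
	return 0;
}
```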
page_owner.c 419 unsigned long pfn, block_end_pfn; in pagetypeinfo_showmixedcount_print()
426 pfn = zone->zone_start_pfn; in pagetypeinfo_showmixedcount_print()
433 for (; pfn < end_pfn; ) { in pagetypeinfo_showmixedcount_print()
434 page = pfn_to_online_page(pfn); in pagetypeinfo_showmixedcount_print()
436 pfn = ALIGN(pfn + 1, MAX_ORDER_NR_PAGES); in pagetypeinfo_showmixedcount_print()
440 block_end_pfn = pageblock_end_pfn(pfn); in pagetypeinfo_showmixedcount_print()
445 for (; pfn < block_end_pfn; pfn++) { in pagetypeinfo_showmixedcount_print()
447 page = pfn_to_page(pfn); in pagetypeinfo_showmixedcount_print()
424 unsigned long pfn, block_end_pfn; pagetypeinfo_showmixedcount_print() local
541 print_page_owner(char __user *buf, size_t count, unsigned long pfn, struct page *page, struct page_owner *page_owner, depot_stack_handle_t handle) print_page_owner() argument
655 unsigned long pfn; read_page_owner() local
767 unsigned long pfn = zone->zone_start_pfn; init_pages_in_zone() local
[all...]
/linux/arch/x86/include/asm/xen/
page.h 56 extern int xen_alloc_p2m_entry(unsigned long pfn);
58 extern unsigned long get_phys_to_machine(unsigned long pfn);
59 extern bool set_phys_to_machine(unsigned long pfn, unsigned long mfn);
60 extern bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
133 static inline unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
137 if (pfn < xen_p2m_size) in __pfn_to_mfn()
138 mfn = xen_p2m_addr[pfn]; in __pfn_to_mfn()
139 else if (unlikely(pfn < xen_max_p2m_pfn)) in __pfn_to_mfn()
140 return get_phys_to_machine(pfn); in __pfn_to_mfn()
142 return IDENTITY_FRAME(pfn); in __pfn_to_mfn()
[all …]
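__pfn_to_mfn() above is a tiered lookup: the flat xen_p2m_addr array first, the slower get_phys_to_machine() path for PFNs beyond it, and an identity frame for anything past the p2m range. A sketch of that shape; the identity encoding is modeled here as a high tag bit, which mirrors the idea but not the kernel's exact IDENTITY_FRAME representation, and all demo_* names are invented:

```c
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_IDENTITY_BIT (UINT64_C(1) << 63)	/* assumption: tag bit for identity frames */
#define DEMO_IDENTITY_FRAME(pfn) ((uint64_t)(pfn) | DEMO_IDENTITY_BIT)

static uint64_t demo_p2m[4] = { 100, 101, 250, 251 };	/* fake flat p2m table */
static unsigned long demo_p2m_size = 4;
static unsigned long demo_max_p2m_pfn = 16;

/* stand-in for the out-of-line get_phys_to_machine() slow path */
static uint64_t demo_get_phys_to_machine(unsigned long pfn)
{
	return DEMO_IDENTITY_FRAME(pfn);
}

static uint64_t demo_pfn_to_mfn(unsigned long pfn)
{
	if (pfn < demo_p2m_size)
		return demo_p2m[pfn];			/* fast path: flat array */
	else if (pfn < demo_max_p2m_pfn)
		return demo_get_phys_to_machine(pfn);	/* slower out-of-line lookup */

	return DEMO_IDENTITY_FRAME(pfn);		/* beyond the p2m: identity */
}

int main(void)
{
	printf("%" PRIx64 " %" PRIx64 " %" PRIx64 "\n",
	       demo_pfn_to_mfn(2), demo_pfn_to_mfn(8), demo_pfn_to_mfn(1234));
	return 0;
}
```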
/linux/tools/testing/scatterlist/
main.c 11 unsigned *pfn; member
28 #define pfn(...) (unsigned []){ __VA_ARGS__ } macro
42 printf(" %x", test->pfn[i]); in fail()
56 { -EINVAL, 1, pfn(0), NULL, PAGE_SIZE, 0, 1 }, in main()
57 { 0, 1, pfn(0), NULL, PAGE_SIZE, PAGE_SIZE + 1, 1 }, in main()
58 { 0, 1, pfn(0), NULL, PAGE_SIZE, sgmax, 1 }, in main()
59 { 0, 1, pfn(0), NULL, 1, sgmax, 1 }, in main()
60 { 0, 2, pfn(0, 1), NULL, 2 * PAGE_SIZE, sgmax, 1 }, in main()
61 { 0, 2, pfn(1, 0), NULL, 2 * PAGE_SIZE, sgmax, 2 }, in main()
62 { 0, 3, pfn(0, 1, 2), NULL, 3 * PAGE_SIZE, sgmax, 1 }, in main()
[all …]
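The pfn(...) macro in the scatterlist test above is a variadic compound literal: each use produces an anonymous unsigned[] that the test table can reference inline, without declaring a named array per case. A standalone sketch of the same trick (demo_case is a made-up struct, not the test's real table layout):

```c
#include <stdio.h>

/* anonymous array-valued compound literal, as in tools/testing/scatterlist/main.c */
#define pfn(...) (unsigned []){ __VA_ARGS__ }

struct demo_case {
	unsigned num_pages;
	unsigned *pfns;
};

int main(void)
{
	struct demo_case cases[] = {
		{ 1, pfn(0) },
		{ 2, pfn(1, 0) },
		{ 3, pfn(0, 1, 2) },
	};
	unsigned i, j;

	for (i = 0; i < sizeof(cases) / sizeof(cases[0]); i++) {
		printf("case %u:", i);
		for (j = 0; j < cases[i].num_pages; j++)
			printf(" %u", cases[i].pfns[j]);
		printf("\n");
	}
	return 0;
}
```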
/linux/arch/arm/xen/
p2m.c 23 unsigned long pfn; member
44 if (new->pfn == entry->pfn) in xen_add_phys_to_mach_entry()
47 if (new->pfn < entry->pfn) in xen_add_phys_to_mach_entry()
59 __func__, &new->pfn, &new->mfn, &entry->pfn, &entry->mfn); in xen_add_phys_to_mach_entry()
64 unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
74 if (entry->pfn <= pfn && in __pfn_to_mfn()
75 entry->pfn + entry->nr_pages > pfn) { in __pfn_to_mfn()
76 unsigned long mfn = entry->mfn + (pfn - entry->pfn); in __pfn_to_mfn()
80 if (pfn < entry->pfn) in __pfn_to_mfn()
150 bool __set_phys_to_machine_multi(unsigned long pfn, in __set_phys_to_machine_multi() argument
[all …]
/linux/include/trace/events/
ksm.h 132 TP_PROTO(unsigned long pfn, void *rmap_item, void *mm, int err),
134 TP_ARGS(pfn, rmap_item, mm, err),
137 __field(unsigned long, pfn)
144 __entry->pfn = pfn;
151 __entry->pfn, __entry->rmap_item, __entry->mm, __entry->err)
167 TP_PROTO(void *ksm_page, unsigned long pfn, void *rmap_item, void *mm, int err),
169 TP_ARGS(ksm_page, pfn, rmap_item, mm, err),
173 __field(unsigned long, pfn)
181 __entry->pfn = pfn;
189 __entry->pfn, __entry->rmap_item, __entry->mm, __entry->err)
[all …]
cma.h 13 TP_PROTO(const char *name, unsigned long pfn, const struct page *page,
16 TP_ARGS(name, pfn, page, count),
20 __field(unsigned long, pfn)
27 __entry->pfn = pfn;
32 TP_printk("name=%s pfn=0x%lx page=%p count=%lu",
34 __entry->pfn,
65 TP_PROTO(const char *name, unsigned long pfn, const struct page *page,
68 TP_ARGS(name, pfn, page, count, align, errorno),
72 __field(unsigned long, pfn)
[all...]
kmem.h 143 __field( unsigned long, pfn )
148 __entry->pfn = page_to_pfn(page);
153 pfn_to_page(__entry->pfn),
154 __entry->pfn,
165 __field( unsigned long, pfn )
169 __entry->pfn = page_to_pfn(page);
173 pfn_to_page(__entry->pfn),
174 __entry->pfn)
185 __field( unsigned long, pfn )
192 __entry->pfn = page ? page_to_pfn(page) : -1UL;
[all …]
/linux/scripts/gdb/linux/
page_owner.py 56 pfn = int(argv[1])
57 self.read_page_owner_by_addr(self.p_ops.pfn_to_page(pfn))
80 pfn = self.p_ops.page_to_pfn(page)
81 section = self.p_ops.pfn_to_section(pfn)
85 return self.get_entry(page_ext, pfn)
100 pfn = self.p_ops.page_to_pfn(page)
102 if pfn < self.min_pfn or pfn > self.max_pfn or (not self.p_ops.pfn_valid(pfn)):
106 page = self.p_ops.pfn_to_page(pfn)
130 gdb.write("PFN: %d, Flags: 0x%x\n" % (pfn, page['flags']))
145 pfn = self.min_pfn
[all …]
mm.py 138 def pfn_to_section_nr(self, pfn): argument
139 return pfn >> self.PFN_SECTION_SHIFT
144 def __pfn_to_section(self, pfn): argument
145 return self.__nr_to_section(self.pfn_to_section_nr(pfn))
147 def pfn_to_section(self, pfn): argument
148 return self.__pfn_to_section(pfn)
150 def subsection_map_index(self, pfn): argument
151 return (pfn & ~(self.PAGE_SECTION_MASK)) // self.PAGES_PER_SUBSECTION
153 def pfn_section_valid(self, ms, pfn): argument
155 idx = self.subsection_map_index(pfn)
[all …]
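The gdb helpers above mirror the kernel's sparsemem arithmetic: a PFN's section number is the PFN shifted right by PFN_SECTION_SHIFT, and the subsection index comes from the PFN's offset within its section. A C sketch of the same arithmetic, with shift values assumed to match common x86-64 defaults (4 KiB pages, 128 MiB sections, 2 MiB subsections):

```c
#include <stdio.h>

/* assumptions: 4 KiB pages, 128 MiB sections, 2 MiB subsections */
#define DEMO_PAGE_SHIFT           12
#define DEMO_SECTION_SIZE_BITS    27
#define DEMO_SUBSECTION_SHIFT     21

#define DEMO_PFN_SECTION_SHIFT    (DEMO_SECTION_SIZE_BITS - DEMO_PAGE_SHIFT)
#define DEMO_PAGES_PER_SECTION    (1UL << DEMO_PFN_SECTION_SHIFT)
#define DEMO_PAGE_SECTION_MASK    (~(DEMO_PAGES_PER_SECTION - 1))
#define DEMO_PAGES_PER_SUBSECTION (1UL << (DEMO_SUBSECTION_SHIFT - DEMO_PAGE_SHIFT))

static unsigned long demo_pfn_to_section_nr(unsigned long pfn)
{
	return pfn >> DEMO_PFN_SECTION_SHIFT;
}

static unsigned long demo_subsection_map_index(unsigned long pfn)
{
	/* offset within the section, in units of subsections */
	return (pfn & ~DEMO_PAGE_SECTION_MASK) / DEMO_PAGES_PER_SUBSECTION;
}

int main(void)
{
	unsigned long pfn = 0x123456;

	printf("pfn %#lx -> section %lu, subsection %lu\n", pfn,
	       demo_pfn_to_section_nr(pfn), demo_subsection_map_index(pfn));
	return 0;
}
```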
/linux/arch/arm/mach-omap2/
io.c 68 .pfn = __phys_to_pfn(L3_24XX_PHYS),
74 .pfn = __phys_to_pfn(L4_24XX_PHYS),
84 .pfn = __phys_to_pfn(DSP_MEM_2420_PHYS),
90 .pfn = __phys_to_pfn(DSP_IPI_2420_PHYS),
96 .pfn = __phys_to_pfn(DSP_MMU_2420_PHYS),
108 .pfn = __phys_to_pfn(L4_WK_243X_PHYS),
114 .pfn = __phys_to_pfn(OMAP243X_GPMC_PHYS),
120 .pfn = __phys_to_pfn(OMAP243X_SDRC_PHYS),
126 .pfn = __phys_to_pfn(OMAP243X_SMS_PHYS),
138 .pfn = __phys_to_pfn(L3_34XX_PHYS),
[all …]
/linux/arch/arm/mm/
flush.c 38 static void flush_pfn_alias(unsigned long pfn, unsigned long vaddr) in flush_pfn_alias() argument
43 set_top_pte(to, pfn_pte(pfn, PAGE_KERNEL)); in flush_pfn_alias()
52 static void flush_icache_alias(unsigned long pfn, unsigned long vaddr, unsigned long len) in flush_icache_alias() argument
58 set_top_pte(va, pfn_pte(pfn, PAGE_KERNEL)); in flush_icache_alias()
98 void flush_cache_pages(struct vm_area_struct *vma, unsigned long user_addr, unsigned long pfn, unsi… in flush_cache_pages() argument
101 vivt_flush_cache_pages(vma, user_addr, pfn, nr); in flush_cache_pages()
106 flush_pfn_alias(pfn, user_addr); in flush_cache_pages()
115 #define flush_pfn_alias(pfn,vaddr) do { } while (0) argument
116 #define flush_icache_alias(pfn,vaddr,len) do { } while (0) argument
255 unsigned long start, offset, pfn; in __flush_dcache_aliases() local
[all …]
fault-armv.c 37 unsigned long pfn, pte_t *ptep) in do_adjust_pte() argument
52 flush_cache_page(vma, address, pfn); in do_adjust_pte()
53 outer_flush_range((pfn << PAGE_SHIFT), in do_adjust_pte()
54 (pfn << PAGE_SHIFT) + PAGE_SIZE); in do_adjust_pte()
65 unsigned long pfn, bool need_lock) in adjust_pte() argument
114 ret = do_adjust_pte(vma, address, pfn, pte); in adjust_pte()
125 unsigned long addr, pte_t *ptep, unsigned long pfn) in make_coherent() argument
167 aliases += adjust_pte(mpnt, mpnt_addr, pfn, need_lock); in make_coherent()
171 do_adjust_pte(vma, addr, pfn, ptep); in make_coherent()
190 unsigned long pfn = pte_pfn(*ptep); in update_mmu_cache_range() local
[all …]
/linux/include/xen/arm/
page.h 15 #define phys_to_machine_mapping_valid(pfn) (1) argument
43 unsigned long __pfn_to_mfn(unsigned long pfn);
47 static inline unsigned long pfn_to_gfn(unsigned long pfn) in pfn_to_gfn() argument
49 return pfn; in pfn_to_gfn()
58 static inline unsigned long pfn_to_bfn(unsigned long pfn) in pfn_to_bfn() argument
63 mfn = __pfn_to_mfn(pfn); in pfn_to_bfn()
68 return pfn; in pfn_to_bfn()
103 bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
104 bool __set_phys_to_machine_multi(unsigned long pfn, unsigned long mfn,
107 static inline bool set_phys_to_machine(unsigned long pfn, unsigned long mfn) in set_phys_to_machine() argument
[all …]
/linux/kernel/power/
snapshot.c 745 * Walk the radix tree to find the page containing the bit that represents @pfn
748 static int memory_bm_find_bit(struct memory_bitmap *bm, unsigned long pfn, in memory_bm_find_bit() argument
757 if (pfn >= zone->start_pfn && pfn < zone->end_pfn) in memory_bm_find_bit()
764 if (pfn >= curr->start_pfn && pfn < curr->end_pfn) { in memory_bm_find_bit()
781 * pfn falls into the current node then we do not need to walk in memory_bm_find_bit()
786 ((pfn - zone->start_pfn) & ~BM_BLOCK_MASK) == bm->cur.node_pfn) in memory_bm_find_bit()
790 block_nr = (pfn - zone->start_pfn) >> BM_BLOCK_SHIFT; in memory_bm_find_bit()
805 bm->cur.node_pfn = (pfn in memory_bm_find_bit()
815 memory_bm_set_bit(struct memory_bitmap *bm, unsigned long pfn) memory_bm_set_bit() argument
826 mem_bm_set_bit_check(struct memory_bitmap *bm, unsigned long pfn) mem_bm_set_bit_check() argument
839 memory_bm_clear_bit(struct memory_bitmap *bm, unsigned long pfn) memory_bm_clear_bit() argument
863 memory_bm_test_bit(struct memory_bitmap *bm, unsigned long pfn) memory_bm_test_bit() argument
874 memory_bm_pfn_present(struct memory_bitmap *bm, unsigned long pfn) memory_bm_pfn_present() argument
931 unsigned long bits, pfn, pages; memory_bm_next_pfn() local
1093 unsigned long pfn; mark_nosave_pages() local
1201 unsigned long pfn; clear_or_poison_free_pages() local
1250 unsigned long pfn, max_zone_pfn, page_count = WD_PAGE_COUNT; mark_free_pages() local
1321 saveable_highmem_page(struct zone *zone, unsigned long pfn) saveable_highmem_page() argument
1355 unsigned long pfn, max_zone_pfn; count_highmem_pages() local
1380 saveable_page(struct zone *zone, unsigned long pfn) saveable_page() argument
1415 unsigned long pfn, max_zone_pfn; count_data_pages() local
1472 page_is_saveable(struct zone *zone, unsigned long pfn) page_is_saveable() argument
1509 page_is_saveable(zone, pfn) global() argument
1530 unsigned long pfn, copy_pfn; copy_data_pages() local
1756 unsigned long pfn = memory_bm_next_pfn(&copy_bm); free_unnecessary_pages() local
2291 unsigned long pfn; duplicate_memory_bitmap() local
2309 unsigned long pfn; mark_unsafe_pages() local
2423 unsigned long pfn; count_highmem_image_pages() local
2725 unsigned long pfn = memory_bm_next_pfn(bm); get_buffer() local
[all...]
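memory_bm_find_bit() above turns a PFN into a bitmap-block number plus a bit offset, both relative to the zone's start PFN: the high bits select the block, the low bits select the bit inside it. A sketch of that split; BM_BLOCK_SHIFT is assumed to be 15 here (one 4 KiB page of bits per block), which may differ from the kernel's configured value:

```c
#include <stdio.h>

/* assumption: 4 KiB pages -> one bitmap block holds PAGE_SIZE * 8 = 32768 bits */
#define DEMO_BM_BLOCK_SHIFT 15
#define DEMO_BM_BLOCK_MASK  ((1UL << DEMO_BM_BLOCK_SHIFT) - 1)

int main(void)
{
	unsigned long zone_start_pfn = 0x1000;
	unsigned long pfn = 0x1a2b3;
	unsigned long off = pfn - zone_start_pfn;

	unsigned long block_nr = off >> DEMO_BM_BLOCK_SHIFT;	/* which bitmap page */
	unsigned long bit      = off & DEMO_BM_BLOCK_MASK;	/* which bit inside it */

	printf("pfn %#lx -> block %lu, bit %lu\n", pfn, block_nr, bit);
	return 0;
}
```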
/linux/arch/x86/mm/
init_32.c 259 unsigned long pfn; in kernel_physical_mapping_init() local
290 pfn = start_pfn; in kernel_physical_mapping_init()
291 pgd_idx = pgd_index((pfn<<PAGE_SHIFT) + PAGE_OFFSET); in kernel_physical_mapping_init()
296 if (pfn >= end_pfn) in kernel_physical_mapping_init()
299 pmd_idx = pmd_index((pfn<<PAGE_SHIFT) + PAGE_OFFSET); in kernel_physical_mapping_init()
304 for (; pmd_idx < PTRS_PER_PMD && pfn < end_pfn; in kernel_physical_mapping_init()
306 unsigned int addr = pfn * PAGE_SIZE + PAGE_OFFSET; in kernel_physical_mapping_init()
323 pfn &= PMD_MASK >> PAGE_SHIFT; in kernel_physical_mapping_init()
324 addr2 = (pfn + PTRS_PER_PTE-1) * PAGE_SIZE + in kernel_physical_mapping_init()
333 set_pmd(pmd, pfn_pmd(pfn, init_prot)); in kernel_physical_mapping_init()
[all …]
/linux/arch/powerpc/platforms/powernv/
memtrace.c 94 unsigned long pfn, start_pfn; in memtrace_clear_range() local
119 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_alloc_node()
120 __SetPageOffline(pfn_to_page(pfn)); in memtrace_alloc_node()
199 unsigned long pfn; in memtrace_init_debugfs()
206 for (pfn = start_pfn; pfn < start_pfn + nr_pages; pfn++) in memtrace_init_debugfs()
207 __ClearPageOffline(pfn_to_page(pfn)); in memtrace_init_debugfs()
114 unsigned long pfn, start_pfn; memtrace_alloc_node() local
218 unsigned long pfn; memtrace_free() local
[all...]
/linux/drivers/ras/
cec.c 186 static int __find_elem(struct ce_array *ca, u64 pfn, unsigned int *to) in __find_elem() argument
196 if (this_pfn < pfn) in __find_elem()
198 else if (this_pfn > pfn) in __find_elem()
200 else if (this_pfn == pfn) { in __find_elem()
223 static int find_elem(struct ce_array *ca, u64 pfn, unsigned int *to) in find_elem() argument
231 return __find_elem(ca, pfn, to); in find_elem()
271 u64 pfn; in del_lru_elem() local
277 pfn = del_lru_elem_unlocked(ca); in del_lru_elem()
280 return pfn; in del_lru_elem()
321 static int cec_add_elem(u64 pfn) in cec_add_elem() argument
[all …]
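__find_elem() above is a binary search over the CEC's sorted array of PFNs, reporting either the matching slot or the insertion point through *to. A generic sketch of that search over a plain sorted array; the real ce_array packs a decay counter into the low bits of each element, which is omitted here, and demo_find_elem is an invented name:

```c
#include <stdint.h>
#include <stdio.h>

/* returns 0 and sets *to to the match, or -1 and sets *to to the insert point */
static int demo_find_elem(const uint64_t *arr, unsigned int n, uint64_t pfn,
			  unsigned int *to)
{
	int min = 0, max = (int)n - 1;

	while (min <= max) {
		int i = (min + max) / 2;
		uint64_t this_pfn = arr[i];

		if (this_pfn < pfn)
			min = i + 1;
		else if (this_pfn > pfn)
			max = i - 1;
		else {
			*to = (unsigned int)i;
			return 0;
		}
	}
	*to = (unsigned int)min;
	return -1;
}

int main(void)
{
	uint64_t ca[] = { 0x100, 0x200, 0x300, 0x400 };
	unsigned int to;
	int ret = demo_find_elem(ca, 4, 0x250, &to);

	printf("ret=%d insert/match index=%u\n", ret, to);
	return 0;
}
```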
