xref: /linux/arch/x86/mm/physaddr.c (revision a1c3be890440a1769ed6f822376a3e3ab0d42994)
// SPDX-License-Identifier: GPL-2.0
#include <linux/memblock.h>
#include <linux/mmdebug.h>
#include <linux/export.h>
#include <linux/mm.h>

#include <asm/page.h>
#include <linux/vmalloc.h>

#include "physaddr.h"

#ifdef CONFIG_X86_64

#ifdef CONFIG_DEBUG_VIRTUAL
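/*
 * Debug version of __phys_addr(): translate a kernel virtual address into a
 * physical address, accepting both the direct mapping (PAGE_OFFSET) and the
 * kernel text mapping (__START_KERNEL_map).  The unsigned subtraction below
 * wraps around for addresses below __START_KERNEL_map, which is what the
 * carry-flag comments refer to.
 */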
unsigned long __phys_addr(unsigned long x)
{
	unsigned long y = x - __START_KERNEL_map;

	/* use the carry flag to determine if x was < __START_KERNEL_map */
	if (unlikely(x > y)) {
		x = y + phys_base;

		VIRTUAL_BUG_ON(y >= KERNEL_IMAGE_SIZE);
	} else {
		x = y + (__START_KERNEL_map - PAGE_OFFSET);

		/* carry flag will be set if starting x was >= PAGE_OFFSET */
		VIRTUAL_BUG_ON((x > y) || !phys_addr_valid(x));
	}

	return x;
}
EXPORT_SYMBOL(__phys_addr);

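/*
 * __phys_addr_symbol() is only meant for kernel symbol addresses, i.e.
 * addresses inside the kernel image mapping at __START_KERNEL_map; the
 * direct mapping is deliberately not handled here.
 */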
unsigned long __phys_addr_symbol(unsigned long x)
{
	unsigned long y = x - __START_KERNEL_map;

	/* only check upper bounds since lower bounds will trigger carry */
	VIRTUAL_BUG_ON(y >= KERNEL_IMAGE_SIZE);

	return y + phys_base;
}
EXPORT_SYMBOL(__phys_addr_symbol);
#endif

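/*
 * Non-fatal variant of the checks in __phys_addr(): the same two-range
 * logic, but a bad address returns false instead of triggering a BUG, and
 * the resulting physical address must additionally map to a valid page
 * frame.
 */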
bool __virt_addr_valid(unsigned long x)
{
	unsigned long y = x - __START_KERNEL_map;

	/* use the carry flag to determine if x was < __START_KERNEL_map */
	if (unlikely(x > y)) {
		x = y + phys_base;

		if (y >= KERNEL_IMAGE_SIZE)
			return false;
	} else {
		x = y + (__START_KERNEL_map - PAGE_OFFSET);

		/* carry flag will be set if starting x was >= PAGE_OFFSET */
		if ((x > y) || !phys_addr_valid(x))
			return false;
	}

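	/* x is now a physical address; require a valid page frame behind it */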
	return pfn_valid(x >> PAGE_SHIFT);
}
EXPORT_SYMBOL(__virt_addr_valid);

#else

#ifdef CONFIG_DEBUG_VIRTUAL
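/*
 * 32-bit debug version: lowmem is mapped at a fixed offset from PAGE_OFFSET,
 * so the translation is a plain subtraction.  The checks reject addresses
 * below PAGE_OFFSET or inside the vmalloc area and, once max_low_pfn is
 * known, anything beyond lowmem, cross-checking the result against the page
 * tables via slow_virt_to_phys().
 */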
unsigned long __phys_addr(unsigned long x)
{
	unsigned long phys_addr = x - PAGE_OFFSET;
	/* VMALLOC_* aren't constants  */
	VIRTUAL_BUG_ON(x < PAGE_OFFSET);
	VIRTUAL_BUG_ON(__vmalloc_start_set && is_vmalloc_addr((void *) x));
	/* max_low_pfn is set early, but not _that_ early */
	if (max_low_pfn) {
		VIRTUAL_BUG_ON((phys_addr >> PAGE_SHIFT) > max_low_pfn);
		BUG_ON(slow_virt_to_phys((void *)x) != phys_addr);
	}
	return phys_addr;
}
EXPORT_SYMBOL(__phys_addr);
#endif

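/*
 * A virtual address is valid here only if it points into lowmem: at or
 * above PAGE_OFFSET, below FIXADDR_START, not inside the vmalloc area
 * (once its bounds are known), and backed by a valid page frame.
 */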
bool __virt_addr_valid(unsigned long x)
{
	if (x < PAGE_OFFSET)
		return false;
	if (__vmalloc_start_set && is_vmalloc_addr((void *) x))
		return false;
	if (x >= FIXADDR_START)
		return false;
	return pfn_valid((x - PAGE_OFFSET) >> PAGE_SHIFT);
}
EXPORT_SYMBOL(__virt_addr_valid);

#endif	/* CONFIG_X86_64 */