/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_PMUV3_H
#define __ASM_PMUV3_H

#include <asm/cp15.h>
#include <asm/cputype.h>

#define PMCCNTR			__ACCESS_CP15_64(0, c9)

#define PMCR			__ACCESS_CP15(c9,  0, c12, 0)
#define PMCNTENSET		__ACCESS_CP15(c9,  0, c12, 1)
#define PMCNTENCLR		__ACCESS_CP15(c9,  0, c12, 2)
#define PMOVSR			__ACCESS_CP15(c9,  0, c12, 3)
#define PMSELR			__ACCESS_CP15(c9,  0, c12, 5)
#define PMCEID0			__ACCESS_CP15(c9,  0, c12, 6)
#define PMCEID1			__ACCESS_CP15(c9,  0, c12, 7)
#define PMXEVTYPER		__ACCESS_CP15(c9,  0, c13, 1)
#define PMXEVCNTR		__ACCESS_CP15(c9,  0, c13, 2)
#define PMUSERENR		__ACCESS_CP15(c9,  0, c14, 0)
#define PMINTENSET		__ACCESS_CP15(c9,  0, c14, 1)
#define PMINTENCLR		__ACCESS_CP15(c9,  0, c14, 2)
#define PMCEID2			__ACCESS_CP15(c9,  0, c14, 4)
#define PMCEID3			__ACCESS_CP15(c9,  0, c14, 5)
#define PMMIR			__ACCESS_CP15(c9,  0, c14, 6)
#define PMCCFILTR		__ACCESS_CP15(c14, 0, c15, 7)

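/* Per-event counter registers, PMEVCNTR0..PMEVCNTR30 */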
#define PMEVCNTR0		__ACCESS_CP15(c14, 0, c8, 0)
#define PMEVCNTR1		__ACCESS_CP15(c14, 0, c8, 1)
#define PMEVCNTR2		__ACCESS_CP15(c14, 0, c8, 2)
#define PMEVCNTR3		__ACCESS_CP15(c14, 0, c8, 3)
#define PMEVCNTR4		__ACCESS_CP15(c14, 0, c8, 4)
#define PMEVCNTR5		__ACCESS_CP15(c14, 0, c8, 5)
#define PMEVCNTR6		__ACCESS_CP15(c14, 0, c8, 6)
#define PMEVCNTR7		__ACCESS_CP15(c14, 0, c8, 7)
#define PMEVCNTR8		__ACCESS_CP15(c14, 0, c9, 0)
#define PMEVCNTR9		__ACCESS_CP15(c14, 0, c9, 1)
#define PMEVCNTR10		__ACCESS_CP15(c14, 0, c9, 2)
#define PMEVCNTR11		__ACCESS_CP15(c14, 0, c9, 3)
#define PMEVCNTR12		__ACCESS_CP15(c14, 0, c9, 4)
#define PMEVCNTR13		__ACCESS_CP15(c14, 0, c9, 5)
#define PMEVCNTR14		__ACCESS_CP15(c14, 0, c9, 6)
#define PMEVCNTR15		__ACCESS_CP15(c14, 0, c9, 7)
#define PMEVCNTR16		__ACCESS_CP15(c14, 0, c10, 0)
#define PMEVCNTR17		__ACCESS_CP15(c14, 0, c10, 1)
#define PMEVCNTR18		__ACCESS_CP15(c14, 0, c10, 2)
#define PMEVCNTR19		__ACCESS_CP15(c14, 0, c10, 3)
#define PMEVCNTR20		__ACCESS_CP15(c14, 0, c10, 4)
#define PMEVCNTR21		__ACCESS_CP15(c14, 0, c10, 5)
#define PMEVCNTR22		__ACCESS_CP15(c14, 0, c10, 6)
#define PMEVCNTR23		__ACCESS_CP15(c14, 0, c10, 7)
#define PMEVCNTR24		__ACCESS_CP15(c14, 0, c11, 0)
#define PMEVCNTR25		__ACCESS_CP15(c14, 0, c11, 1)
#define PMEVCNTR26		__ACCESS_CP15(c14, 0, c11, 2)
#define PMEVCNTR27		__ACCESS_CP15(c14, 0, c11, 3)
#define PMEVCNTR28		__ACCESS_CP15(c14, 0, c11, 4)
#define PMEVCNTR29		__ACCESS_CP15(c14, 0, c11, 5)
#define PMEVCNTR30		__ACCESS_CP15(c14, 0, c11, 6)

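/* Per-event type/filter registers, PMEVTYPER0..PMEVTYPER30 */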
#define PMEVTYPER0		__ACCESS_CP15(c14, 0, c12, 0)
#define PMEVTYPER1		__ACCESS_CP15(c14, 0, c12, 1)
#define PMEVTYPER2		__ACCESS_CP15(c14, 0, c12, 2)
#define PMEVTYPER3		__ACCESS_CP15(c14, 0, c12, 3)
#define PMEVTYPER4		__ACCESS_CP15(c14, 0, c12, 4)
#define PMEVTYPER5		__ACCESS_CP15(c14, 0, c12, 5)
#define PMEVTYPER6		__ACCESS_CP15(c14, 0, c12, 6)
#define PMEVTYPER7		__ACCESS_CP15(c14, 0, c12, 7)
#define PMEVTYPER8		__ACCESS_CP15(c14, 0, c13, 0)
#define PMEVTYPER9		__ACCESS_CP15(c14, 0, c13, 1)
#define PMEVTYPER10		__ACCESS_CP15(c14, 0, c13, 2)
#define PMEVTYPER11		__ACCESS_CP15(c14, 0, c13, 3)
#define PMEVTYPER12		__ACCESS_CP15(c14, 0, c13, 4)
#define PMEVTYPER13		__ACCESS_CP15(c14, 0, c13, 5)
#define PMEVTYPER14		__ACCESS_CP15(c14, 0, c13, 6)
#define PMEVTYPER15		__ACCESS_CP15(c14, 0, c13, 7)
#define PMEVTYPER16		__ACCESS_CP15(c14, 0, c14, 0)
#define PMEVTYPER17		__ACCESS_CP15(c14, 0, c14, 1)
#define PMEVTYPER18		__ACCESS_CP15(c14, 0, c14, 2)
#define PMEVTYPER19		__ACCESS_CP15(c14, 0, c14, 3)
#define PMEVTYPER20		__ACCESS_CP15(c14, 0, c14, 4)
#define PMEVTYPER21		__ACCESS_CP15(c14, 0, c14, 5)
#define PMEVTYPER22		__ACCESS_CP15(c14, 0, c14, 6)
#define PMEVTYPER23		__ACCESS_CP15(c14, 0, c14, 7)
#define PMEVTYPER24		__ACCESS_CP15(c14, 0, c15, 0)
#define PMEVTYPER25		__ACCESS_CP15(c14, 0, c15, 1)
#define PMEVTYPER26		__ACCESS_CP15(c14, 0, c15, 2)
#define PMEVTYPER27		__ACCESS_CP15(c14, 0, c15, 3)
#define PMEVTYPER28		__ACCESS_CP15(c14, 0, c15, 4)
#define PMEVTYPER29		__ACCESS_CP15(c14, 0, c15, 5)
#define PMEVTYPER30		__ACCESS_CP15(c14, 0, c15, 6)

#define RETURN_READ_PMEVCNTRN(n) \
	return read_sysreg(PMEVCNTR##n)
static inline unsigned long read_pmevcntrn(int n)
{
	PMEVN_SWITCH(n, RETURN_READ_PMEVCNTRN);
	return 0;
}

#define WRITE_PMEVCNTRN(n) \
	write_sysreg(val, PMEVCNTR##n)
static inline void write_pmevcntrn(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVCNTRN);
}

#define WRITE_PMEVTYPERN(n) \
	write_sysreg(val, PMEVTYPER##n)
static inline void write_pmevtypern(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVTYPERN);
}
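
/*
 * PMEVN_SWITCH() is provided by the shared PMUv3 support code. As a sketch
 * (an illustration of the intent, not the exact macro), the accessors above
 * are expected to expand to something like:
 *
 *	switch (n) {
 *	case 0: <access PMEVCNTR0/PMEVTYPER0>; break;
 *	...
 *	case 30: <access PMEVCNTR30/PMEVTYPER30>; break;
 *	default: WARN(...);
 *	}
 *
 * turning a runtime counter index into an access to the matching
 * fixed-encoding CP15 register defined above.
 */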

static inline unsigned long read_pmmir(void)
{
	return read_sysreg(PMMIR);
}

static inline u32 read_pmuver(void)
{
	/* ID_DFR0.PerfMon, bits [27:24]; PMUVers is not a signed field */
	u32 dfr0 = read_cpuid_ext(CPUID_EXT_DFR0);

	return (dfr0 >> 24) & 0xf;
}

static inline bool pmuv3_has_icntr(void)
{
	/* FEAT_PMUv3_ICNTR not accessible for 32-bit */
	return false;
}

static inline void write_pmcr(u32 val)
{
	write_sysreg(val, PMCR);
}

static inline u32 read_pmcr(void)
{
	return read_sysreg(PMCR);
}

static inline void write_pmselr(u32 val)
{
	write_sysreg(val, PMSELR);
}

static inline void write_pmccntr(u64 val)
{
	write_sysreg(val, PMCCNTR);
}

static inline u64 read_pmccntr(void)
{
	return read_sysreg(PMCCNTR);
}

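/*
 * The dedicated instruction counter (FEAT_PMUv3_ICNTR) is not accessible
 * from AArch32 (see pmuv3_has_icntr() above), so its counter accessors are
 * no-op stubs here.
 */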
static inline void write_pmicntr(u64 val) {}

static inline u64 read_pmicntr(void)
{
	return 0;
}

static inline void write_pmcntenset(u32 val)
{
	write_sysreg(val, PMCNTENSET);
}

static inline void write_pmcntenclr(u32 val)
{
	write_sysreg(val, PMCNTENCLR);
}

static inline void write_pmintenset(u32 val)
{
	write_sysreg(val, PMINTENSET);
}

static inline void write_pmintenclr(u32 val)
{
	write_sysreg(val, PMINTENCLR);
}

static inline void write_pmccfiltr(u32 val)
{
	write_sysreg(val, PMCCFILTR);
}

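/*
 * PMICFILTR filters the instruction counter and, like the counter itself,
 * is not accessible from AArch32, hence the stub accessors.
 */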
static inline void write_pmicfiltr(u64 val) {}

static inline u64 read_pmicfiltr(void)
{
	return 0;
}

static inline void write_pmovsclr(u32 val)
{
	write_sysreg(val, PMOVSR);
}

static inline u32 read_pmovsclr(void)
{
	return read_sysreg(PMOVSR);
}

static inline void write_pmuserenr(u32 val)
{
	write_sysreg(val, PMUSERENR);
}

static inline void write_pmuacr(u64 val) {}

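/*
 * KVM hooks called by the shared PMUv3 driver; they are stubbed out for
 * the 32-bit build.
 */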
static inline void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr) {}
static inline void kvm_clr_pmu_events(u32 clr) {}
static inline bool kvm_pmu_counter_deferred(struct perf_event_attr *attr)
{
	return false;
}

static inline bool kvm_set_pmuserenr(u64 val)
{
	return false;
}

static inline void kvm_vcpu_pmu_resync_el0(void) {}

/* PMU version values in the ID_DFR0.PerfMon field */
#define ARMV8_PMU_DFR_VER_NI        0
#define ARMV8_PMU_DFR_VER_V3P1      0x4
#define ARMV8_PMU_DFR_VER_V3P4      0x5
#define ARMV8_PMU_DFR_VER_V3P5      0x6
#define ARMV8_PMU_DFR_VER_V3P9      0x9
#define ARMV8_PMU_DFR_VER_IMP_DEF   0xF

static inline bool pmuv3_implemented(int pmuver)
{
	return !(pmuver == ARMV8_PMU_DFR_VER_IMP_DEF ||
		 pmuver == ARMV8_PMU_DFR_VER_NI);
}

static inline bool is_pmuv3p4(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P4;
}

static inline bool is_pmuv3p5(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
}

static inline bool is_pmuv3p9(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P9;
}

static inline u64 read_pmceid0(void)
{
	u64 val = read_sysreg(PMCEID0);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID2) << 32;

	return val;
}

static inline u64 read_pmceid1(void)
{
	u64 val = read_sysreg(PMCEID1);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID3) << 32;

	return val;
}

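/*
 * Illustrative sketch only (not part of this header's interface): the
 * hypothetical helper below shows a typical sequence for programming and
 * enabling event counter @idx with the accessors defined above.
 */
static inline void __example_program_event_counter(int idx, u32 evtype)
{
	write_pmevtypern(idx, evtype);	/* select the event and filters */
	write_pmevcntrn(idx, 0);	/* start counting from zero */
	write_pmcntenset(1U << idx);	/* enable counter @idx */
}
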
#endif