/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_PMUV3_H
#define __ASM_PMUV3_H

#include <asm/cp15.h>
#include <asm/cputype.h>

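/*
 * AArch32 (CP15) views of the PMUv3 registers. PMCCNTR is the 64-bit
 * cycle counter; the remaining registers are 32-bit accessors.
 */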
#define PMCCNTR			__ACCESS_CP15_64(0, c9)

#define PMCR			__ACCESS_CP15(c9,  0, c12, 0)
#define PMCNTENSET		__ACCESS_CP15(c9,  0, c12, 1)
#define PMCNTENCLR		__ACCESS_CP15(c9,  0, c12, 2)
#define PMOVSR			__ACCESS_CP15(c9,  0, c12, 3)
#define PMSELR			__ACCESS_CP15(c9,  0, c12, 5)
#define PMCEID0			__ACCESS_CP15(c9,  0, c12, 6)
#define PMCEID1			__ACCESS_CP15(c9,  0, c12, 7)
#define PMXEVTYPER		__ACCESS_CP15(c9,  0, c13, 1)
#define PMXEVCNTR		__ACCESS_CP15(c9,  0, c13, 2)
#define PMUSERENR		__ACCESS_CP15(c9,  0, c14, 0)
#define PMINTENSET		__ACCESS_CP15(c9,  0, c14, 1)
#define PMINTENCLR		__ACCESS_CP15(c9,  0, c14, 2)
#define PMCEID2			__ACCESS_CP15(c9,  0, c14, 4)
#define PMCEID3			__ACCESS_CP15(c9,  0, c14, 5)
#define PMMIR			__ACCESS_CP15(c9,  0, c14, 6)
#define PMCCFILTR		__ACCESS_CP15(c14, 0, c15, 7)

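/* Per-counter event count registers PMEVCNTR<n>, n = 0..30 */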
#define PMEVCNTR0		__ACCESS_CP15(c14, 0, c8, 0)
#define PMEVCNTR1		__ACCESS_CP15(c14, 0, c8, 1)
#define PMEVCNTR2		__ACCESS_CP15(c14, 0, c8, 2)
#define PMEVCNTR3		__ACCESS_CP15(c14, 0, c8, 3)
#define PMEVCNTR4		__ACCESS_CP15(c14, 0, c8, 4)
#define PMEVCNTR5		__ACCESS_CP15(c14, 0, c8, 5)
#define PMEVCNTR6		__ACCESS_CP15(c14, 0, c8, 6)
#define PMEVCNTR7		__ACCESS_CP15(c14, 0, c8, 7)
#define PMEVCNTR8		__ACCESS_CP15(c14, 0, c9, 0)
#define PMEVCNTR9		__ACCESS_CP15(c14, 0, c9, 1)
#define PMEVCNTR10		__ACCESS_CP15(c14, 0, c9, 2)
#define PMEVCNTR11		__ACCESS_CP15(c14, 0, c9, 3)
#define PMEVCNTR12		__ACCESS_CP15(c14, 0, c9, 4)
#define PMEVCNTR13		__ACCESS_CP15(c14, 0, c9, 5)
#define PMEVCNTR14		__ACCESS_CP15(c14, 0, c9, 6)
#define PMEVCNTR15		__ACCESS_CP15(c14, 0, c9, 7)
#define PMEVCNTR16		__ACCESS_CP15(c14, 0, c10, 0)
#define PMEVCNTR17		__ACCESS_CP15(c14, 0, c10, 1)
#define PMEVCNTR18		__ACCESS_CP15(c14, 0, c10, 2)
#define PMEVCNTR19		__ACCESS_CP15(c14, 0, c10, 3)
#define PMEVCNTR20		__ACCESS_CP15(c14, 0, c10, 4)
#define PMEVCNTR21		__ACCESS_CP15(c14, 0, c10, 5)
#define PMEVCNTR22		__ACCESS_CP15(c14, 0, c10, 6)
#define PMEVCNTR23		__ACCESS_CP15(c14, 0, c10, 7)
#define PMEVCNTR24		__ACCESS_CP15(c14, 0, c11, 0)
#define PMEVCNTR25		__ACCESS_CP15(c14, 0, c11, 1)
#define PMEVCNTR26		__ACCESS_CP15(c14, 0, c11, 2)
#define PMEVCNTR27		__ACCESS_CP15(c14, 0, c11, 3)
#define PMEVCNTR28		__ACCESS_CP15(c14, 0, c11, 4)
#define PMEVCNTR29		__ACCESS_CP15(c14, 0, c11, 5)
#define PMEVCNTR30		__ACCESS_CP15(c14, 0, c11, 6)

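/* Per-counter event type/filter registers PMEVTYPER<n>, n = 0..30 */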
#define PMEVTYPER0		__ACCESS_CP15(c14, 0, c12, 0)
#define PMEVTYPER1		__ACCESS_CP15(c14, 0, c12, 1)
#define PMEVTYPER2		__ACCESS_CP15(c14, 0, c12, 2)
#define PMEVTYPER3		__ACCESS_CP15(c14, 0, c12, 3)
#define PMEVTYPER4		__ACCESS_CP15(c14, 0, c12, 4)
#define PMEVTYPER5		__ACCESS_CP15(c14, 0, c12, 5)
#define PMEVTYPER6		__ACCESS_CP15(c14, 0, c12, 6)
#define PMEVTYPER7		__ACCESS_CP15(c14, 0, c12, 7)
#define PMEVTYPER8		__ACCESS_CP15(c14, 0, c13, 0)
#define PMEVTYPER9		__ACCESS_CP15(c14, 0, c13, 1)
#define PMEVTYPER10		__ACCESS_CP15(c14, 0, c13, 2)
#define PMEVTYPER11		__ACCESS_CP15(c14, 0, c13, 3)
#define PMEVTYPER12		__ACCESS_CP15(c14, 0, c13, 4)
#define PMEVTYPER13		__ACCESS_CP15(c14, 0, c13, 5)
#define PMEVTYPER14		__ACCESS_CP15(c14, 0, c13, 6)
#define PMEVTYPER15		__ACCESS_CP15(c14, 0, c13, 7)
#define PMEVTYPER16		__ACCESS_CP15(c14, 0, c14, 0)
#define PMEVTYPER17		__ACCESS_CP15(c14, 0, c14, 1)
#define PMEVTYPER18		__ACCESS_CP15(c14, 0, c14, 2)
#define PMEVTYPER19		__ACCESS_CP15(c14, 0, c14, 3)
#define PMEVTYPER20		__ACCESS_CP15(c14, 0, c14, 4)
#define PMEVTYPER21		__ACCESS_CP15(c14, 0, c14, 5)
#define PMEVTYPER22		__ACCESS_CP15(c14, 0, c14, 6)
#define PMEVTYPER23		__ACCESS_CP15(c14, 0, c14, 7)
#define PMEVTYPER24		__ACCESS_CP15(c14, 0, c15, 0)
#define PMEVTYPER25		__ACCESS_CP15(c14, 0, c15, 1)
#define PMEVTYPER26		__ACCESS_CP15(c14, 0, c15, 2)
#define PMEVTYPER27		__ACCESS_CP15(c14, 0, c15, 3)
#define PMEVTYPER28		__ACCESS_CP15(c14, 0, c15, 4)
#define PMEVTYPER29		__ACCESS_CP15(c14, 0, c15, 5)
#define PMEVTYPER30		__ACCESS_CP15(c14, 0, c15, 6)

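/*
 * PMEVN_SWITCH(n, case_macro) is provided by the shared PMUv3 code; it
 * expands to a switch on the counter index so that the correct
 * PMEVCNTR<n>/PMEVTYPER<n> register is accessed for a run-time 'n'.
 */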
#define RETURN_READ_PMEVCNTRN(n) \
	return read_sysreg(PMEVCNTR##n)
static inline unsigned long read_pmevcntrn(int n)
{
	PMEVN_SWITCH(n, RETURN_READ_PMEVCNTRN);
	return 0;
}

#define WRITE_PMEVCNTRN(n) \
	write_sysreg(val, PMEVCNTR##n)
static inline void write_pmevcntrn(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVCNTRN);
}

#define WRITE_PMEVTYPERN(n) \
	write_sysreg(val, PMEVTYPER##n)
static inline void write_pmevtypern(int n, unsigned long val)
{
	PMEVN_SWITCH(n, WRITE_PMEVTYPERN);
}

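/* PMMIR is only present from PMUv3.4 onwards; callers check is_pmuv3p4() */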
static inline unsigned long read_pmmir(void)
{
	return read_sysreg(PMMIR);
}

static inline u32 read_pmuver(void)
{
	/* PMUVers is not a signed field */
	u32 dfr0 = read_cpuid_ext(CPUID_EXT_DFR0);

	return (dfr0 >> 24) & 0xf;
}

static inline void write_pmcr(u32 val)
{
	write_sysreg(val, PMCR);
}

static inline u32 read_pmcr(void)
{
	return read_sysreg(PMCR);
}

static inline void write_pmselr(u32 val)
{
	write_sysreg(val, PMSELR);
}

static inline void write_pmccntr(u64 val)
{
	write_sysreg(val, PMCCNTR);
}

static inline u64 read_pmccntr(void)
{
	return read_sysreg(PMCCNTR);
}

static inline void write_pmcntenset(u32 val)
{
	write_sysreg(val, PMCNTENSET);
}

static inline void write_pmcntenclr(u32 val)
{
	write_sysreg(val, PMCNTENCLR);
}

static inline void write_pmintenset(u32 val)
{
	write_sysreg(val, PMINTENSET);
}

static inline void write_pmintenclr(u32 val)
{
	write_sysreg(val, PMINTENCLR);
}

static inline void write_pmccfiltr(u32 val)
{
	write_sysreg(val, PMCCFILTR);
}

static inline void write_pmovsclr(u32 val)
{
	write_sysreg(val, PMOVSR);
}

static inline u32 read_pmovsclr(void)
{
	return read_sysreg(PMOVSR);
}

static inline void write_pmuserenr(u32 val)
{
	write_sysreg(val, PMUSERENR);
}

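/*
 * 32-bit ARM has no KVM host support, so the KVM hooks used by the shared
 * PMUv3 driver are no-op stubs here.
 */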
static inline void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr) {}
static inline void kvm_clr_pmu_events(u32 clr) {}
static inline bool kvm_pmu_counter_deferred(struct perf_event_attr *attr)
{
	return false;
}

static inline bool kvm_set_pmuserenr(u64 val)
{
	return false;
}

static inline void kvm_vcpu_pmu_resync_el0(void) {}

/* PMU Version in DFR Register */
#define ARMV8_PMU_DFR_VER_NI        0
#define ARMV8_PMU_DFR_VER_V3P1      0x4
#define ARMV8_PMU_DFR_VER_V3P4      0x5
#define ARMV8_PMU_DFR_VER_V3P5      0x6
#define ARMV8_PMU_DFR_VER_IMP_DEF   0xF

static inline bool pmuv3_implemented(int pmuver)
{
	return !(pmuver == ARMV8_PMU_DFR_VER_IMP_DEF ||
		 pmuver == ARMV8_PMU_DFR_VER_NI);
}

static inline bool is_pmuv3p4(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P4;
}

static inline bool is_pmuv3p5(int pmuver)
{
	return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
}

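/*
 * From PMUv3.1 the common event ID space is 64 bits wide: PMCEID2/PMCEID3
 * hold the upper 32 bits of PMCEID0/PMCEID1 respectively.
 */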
static inline u64 read_pmceid0(void)
{
	u64 val = read_sysreg(PMCEID0);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID2) << 32;

	return val;
}

static inline u64 read_pmceid1(void)
{
	u64 val = read_sysreg(PMCEID1);

	if (read_pmuver() >= ARMV8_PMU_DFR_VER_V3P1)
		val |= (u64)read_sysreg(PMCEID3) << 32;

	return val;
}

#endif