1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#ifdef CONFIG_CPU_V7
21
22#include <asm/cp15.h>
23#include <asm/cputype.h>
24#include <asm/irq_regs.h>
25#include <asm/vfp.h>
26#include "../vfp/vfpinstr.h"
27
28#include <linux/of.h>
29#include <linux/perf/arm_pmu.h>
30#include <linux/platform_device.h>
31
32
33
34
35
36
37
38
39#define ARMV7_PERFCTR_PMNC_SW_INCR 0x00
40#define ARMV7_PERFCTR_L1_ICACHE_REFILL 0x01
41#define ARMV7_PERFCTR_ITLB_REFILL 0x02
42#define ARMV7_PERFCTR_L1_DCACHE_REFILL 0x03
43#define ARMV7_PERFCTR_L1_DCACHE_ACCESS 0x04
44#define ARMV7_PERFCTR_DTLB_REFILL 0x05
45#define ARMV7_PERFCTR_MEM_READ 0x06
46#define ARMV7_PERFCTR_MEM_WRITE 0x07
47#define ARMV7_PERFCTR_INSTR_EXECUTED 0x08
48#define ARMV7_PERFCTR_EXC_TAKEN 0x09
49#define ARMV7_PERFCTR_EXC_EXECUTED 0x0A
50#define ARMV7_PERFCTR_CID_WRITE 0x0B
51
52
53
54
55
56
57
58
59#define ARMV7_PERFCTR_PC_WRITE 0x0C
60#define ARMV7_PERFCTR_PC_IMM_BRANCH 0x0D
61#define ARMV7_PERFCTR_PC_PROC_RETURN 0x0E
62#define ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS 0x0F
63#define ARMV7_PERFCTR_PC_BRANCH_MIS_PRED 0x10
64#define ARMV7_PERFCTR_CLOCK_CYCLES 0x11
65#define ARMV7_PERFCTR_PC_BRANCH_PRED 0x12
66
67
68#define ARMV7_PERFCTR_MEM_ACCESS 0x13
69#define ARMV7_PERFCTR_L1_ICACHE_ACCESS 0x14
70#define ARMV7_PERFCTR_L1_DCACHE_WB 0x15
71#define ARMV7_PERFCTR_L2_CACHE_ACCESS 0x16
72#define ARMV7_PERFCTR_L2_CACHE_REFILL 0x17
73#define ARMV7_PERFCTR_L2_CACHE_WB 0x18
74#define ARMV7_PERFCTR_BUS_ACCESS 0x19
75#define ARMV7_PERFCTR_MEM_ERROR 0x1A
76#define ARMV7_PERFCTR_INSTR_SPEC 0x1B
77#define ARMV7_PERFCTR_TTBR_WRITE 0x1C
78#define ARMV7_PERFCTR_BUS_CYCLES 0x1D
79
80#define ARMV7_PERFCTR_CPU_CYCLES 0xFF
81
82
83#define ARMV7_A8_PERFCTR_L2_CACHE_ACCESS 0x43
84#define ARMV7_A8_PERFCTR_L2_CACHE_REFILL 0x44
85#define ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS 0x50
86#define ARMV7_A8_PERFCTR_STALL_ISIDE 0x56
87
88
89#define ARMV7_A9_PERFCTR_INSTR_CORE_RENAME 0x68
90#define ARMV7_A9_PERFCTR_STALL_ICACHE 0x60
91#define ARMV7_A9_PERFCTR_STALL_DISPATCH 0x66
92
93
94#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL 0xc2
95#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP 0xc3
96
97
98#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
99#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41
100#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ 0x42
101#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE 0x43
102
103#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ 0x4C
104#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE 0x4D
105
106#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ 0x50
107#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51
108#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ 0x52
109#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE 0x53
110
111#define ARMV7_A15_PERFCTR_PC_WRITE_SPEC 0x76
112
113
114#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
115#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41
116
117#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ 0x50
118#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51
119
120#define ARMV7_A12_PERFCTR_PC_WRITE_SPEC 0x76
121
122#define ARMV7_A12_PERFCTR_PF_TLB_REFILL 0xe7
123
124
125#define KRAIT_PMRESR0_GROUP0 0xcc
126#define KRAIT_PMRESR1_GROUP0 0xd0
127#define KRAIT_PMRESR2_GROUP0 0xd4
128#define KRAIT_VPMRESR0_GROUP0 0xd8
129
130#define KRAIT_PERFCTR_L1_ICACHE_ACCESS 0x10011
131#define KRAIT_PERFCTR_L1_ICACHE_MISS 0x10010
132
133#define KRAIT_PERFCTR_L1_ITLB_ACCESS 0x12222
134#define KRAIT_PERFCTR_L1_DTLB_ACCESS 0x12210
135
136
137#define SCORPION_LPM0_GROUP0 0x4c
138#define SCORPION_LPM1_GROUP0 0x50
139#define SCORPION_LPM2_GROUP0 0x54
140#define SCORPION_L2LPM_GROUP0 0x58
141#define SCORPION_VLPM_GROUP0 0x5c
142
143#define SCORPION_ICACHE_ACCESS 0x10053
144#define SCORPION_ICACHE_MISS 0x10052
145
146#define SCORPION_DTLB_ACCESS 0x12013
147#define SCORPION_DTLB_MISS 0x12012
148
149#define SCORPION_ITLB_MISS 0x12021
150
151
152
153
154
155
156
157
158static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
159 PERF_MAP_ALL_UNSUPPORTED,
160 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
161 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
162 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
163 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
164 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
165 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
166 [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A8_PERFCTR_STALL_ISIDE,
167};
168
169static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
170 [PERF_COUNT_HW_CACHE_OP_MAX]
171 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
172 PERF_CACHE_MAP_ALL_UNSUPPORTED,
173
174
175
176
177
178
179 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
180 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
181 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
182 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
183
184 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
185 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
186
187 [C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
188 [C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
189 [C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
190 [C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
191
192 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
193 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
194
195 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
196 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
197
198 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
199 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
200 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
201 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
202};
203
204
205
206
207static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
208 PERF_MAP_ALL_UNSUPPORTED,
209 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
210 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
211 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
212 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
213 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
214 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
215 [PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A9_PERFCTR_STALL_ICACHE,
216 [PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV7_A9_PERFCTR_STALL_DISPATCH,
217};
218
219static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
220 [PERF_COUNT_HW_CACHE_OP_MAX]
221 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
222 PERF_CACHE_MAP_ALL_UNSUPPORTED,
223
224
225
226
227
228
229 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
230 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
231 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
232 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
233
234 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
235
236 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
237 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
238
239 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
240 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
241
242 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
243 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
244 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
245 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
246};
247
248
249
250
251static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
252 PERF_MAP_ALL_UNSUPPORTED,
253 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
254 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
255 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
256 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
257 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
258 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
259};
260
261static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
262 [PERF_COUNT_HW_CACHE_OP_MAX]
263 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
264 PERF_CACHE_MAP_ALL_UNSUPPORTED,
265
266 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
267 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
268 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
269 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
270 [C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
271 [C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,
272
273 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
274 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
275
276
277
278
279 [C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
280 [C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,
281
282 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
283 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
284
285 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
286 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
287
288 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
289 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
290 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
291 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
292};
293
294
295
296
297static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
298 PERF_MAP_ALL_UNSUPPORTED,
299 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
300 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
301 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
302 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
303 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
304 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
305 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
306};
307
308static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
309 [PERF_COUNT_HW_CACHE_OP_MAX]
310 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
311 PERF_CACHE_MAP_ALL_UNSUPPORTED,
312
313 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
314 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
315 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
316 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,
317
318
319
320
321
322
323 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
324 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
325
326 [C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
327 [C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
328 [C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
329 [C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,
330
331 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
332 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,
333
334 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
335 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
336
337 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
338 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
339 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
340 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
341};
342
343
344
345
346static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
347 PERF_MAP_ALL_UNSUPPORTED,
348 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
349 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
350 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
351 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
352 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
353 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
354 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
355};
356
357static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
358 [PERF_COUNT_HW_CACHE_OP_MAX]
359 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
360 PERF_CACHE_MAP_ALL_UNSUPPORTED,
361
362
363
364
365
366
367 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
368 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
369 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
370 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
371
372 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
373 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
374
375 [C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
376 [C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
377 [C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
378 [C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
379
380 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
381 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
382
383 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
384 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
385
386 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
387 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
388 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
389 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
390};
391
392
393
394
395static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
396 PERF_MAP_ALL_UNSUPPORTED,
397 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
398 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
399 [PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
400 [PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
401 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
402 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
403 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
404};
405
406static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
407 [PERF_COUNT_HW_CACHE_OP_MAX]
408 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
409 PERF_CACHE_MAP_ALL_UNSUPPORTED,
410
411 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
412 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
413 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
414 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
415
416
417
418
419
420
421 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
422 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
423
424 [C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
425 [C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
426 [C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
427 [C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
428
429 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
430 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
431 [C(DTLB)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A12_PERFCTR_PF_TLB_REFILL,
432
433 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
434 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
435
436 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
437 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
438 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
439 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
440};
441
442
443
444
445static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
446 PERF_MAP_ALL_UNSUPPORTED,
447 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
448 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
449 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
450 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
451 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
452};
453
454static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
455 PERF_MAP_ALL_UNSUPPORTED,
456 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
457 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
458 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
459 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
460};
461
462static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
463 [PERF_COUNT_HW_CACHE_OP_MAX]
464 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
465 PERF_CACHE_MAP_ALL_UNSUPPORTED,
466
467
468
469
470
471
472 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
473 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
474 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
475 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
476
477 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ICACHE_ACCESS,
478 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = KRAIT_PERFCTR_L1_ICACHE_MISS,
479
480 [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
481 [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
482
483 [C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
484 [C(ITLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
485
486 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
487 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
488 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
489 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
490};
491
492
493
494
495static const unsigned scorpion_perf_map[PERF_COUNT_HW_MAX] = {
496 PERF_MAP_ALL_UNSUPPORTED,
497 [PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
498 [PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
499 [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
500 [PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
501 [PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
502};
503
504static const unsigned scorpion_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
505 [PERF_COUNT_HW_CACHE_OP_MAX]
506 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
507 PERF_CACHE_MAP_ALL_UNSUPPORTED,
508
509
510
511
512
513 [C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
514 [C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
515 [C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
516 [C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
517 [C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_ICACHE_ACCESS,
518 [C(L1I)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ICACHE_MISS,
519
520
521
522
523 [C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
524 [C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
525 [C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
526 [C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
527 [C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
528 [C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
529 [C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
530 [C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
531 [C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
532 [C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
533};
534
535PMU_FORMAT_ATTR(event, "config:0-7");
536
537static struct attribute *armv7_pmu_format_attrs[] = {
538 &format_attr_event.attr,
539 NULL,
540};
541
542static struct attribute_group armv7_pmu_format_attr_group = {
543 .name = "format",
544 .attrs = armv7_pmu_format_attrs,
545};
546
547#define ARMV7_EVENT_ATTR_RESOLVE(m) #m
548#define ARMV7_EVENT_ATTR(name, config) \
549 PMU_EVENT_ATTR_STRING(name, armv7_event_attr_##name, \
550 "event=" ARMV7_EVENT_ATTR_RESOLVE(config))
551
552ARMV7_EVENT_ATTR(sw_incr, ARMV7_PERFCTR_PMNC_SW_INCR);
553ARMV7_EVENT_ATTR(l1i_cache_refill, ARMV7_PERFCTR_L1_ICACHE_REFILL);
554ARMV7_EVENT_ATTR(l1i_tlb_refill, ARMV7_PERFCTR_ITLB_REFILL);
555ARMV7_EVENT_ATTR(l1d_cache_refill, ARMV7_PERFCTR_L1_DCACHE_REFILL);
556ARMV7_EVENT_ATTR(l1d_cache, ARMV7_PERFCTR_L1_DCACHE_ACCESS);
557ARMV7_EVENT_ATTR(l1d_tlb_refill, ARMV7_PERFCTR_DTLB_REFILL);
558ARMV7_EVENT_ATTR(ld_retired, ARMV7_PERFCTR_MEM_READ);
559ARMV7_EVENT_ATTR(st_retired, ARMV7_PERFCTR_MEM_WRITE);
560ARMV7_EVENT_ATTR(inst_retired, ARMV7_PERFCTR_INSTR_EXECUTED);
561ARMV7_EVENT_ATTR(exc_taken, ARMV7_PERFCTR_EXC_TAKEN);
562ARMV7_EVENT_ATTR(exc_return, ARMV7_PERFCTR_EXC_EXECUTED);
563ARMV7_EVENT_ATTR(cid_write_retired, ARMV7_PERFCTR_CID_WRITE);
564ARMV7_EVENT_ATTR(pc_write_retired, ARMV7_PERFCTR_PC_WRITE);
565ARMV7_EVENT_ATTR(br_immed_retired, ARMV7_PERFCTR_PC_IMM_BRANCH);
566ARMV7_EVENT_ATTR(br_return_retired, ARMV7_PERFCTR_PC_PROC_RETURN);
567ARMV7_EVENT_ATTR(unaligned_ldst_retired, ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS);
568ARMV7_EVENT_ATTR(br_mis_pred, ARMV7_PERFCTR_PC_BRANCH_MIS_PRED);
569ARMV7_EVENT_ATTR(cpu_cycles, ARMV7_PERFCTR_CLOCK_CYCLES);
570ARMV7_EVENT_ATTR(br_pred, ARMV7_PERFCTR_PC_BRANCH_PRED);
571
572static struct attribute *armv7_pmuv1_event_attrs[] = {
573 &armv7_event_attr_sw_incr.attr.attr,
574 &armv7_event_attr_l1i_cache_refill.attr.attr,
575 &armv7_event_attr_l1i_tlb_refill.attr.attr,
576 &armv7_event_attr_l1d_cache_refill.attr.attr,
577 &armv7_event_attr_l1d_cache.attr.attr,
578 &armv7_event_attr_l1d_tlb_refill.attr.attr,
579 &armv7_event_attr_ld_retired.attr.attr,
580 &armv7_event_attr_st_retired.attr.attr,
581 &armv7_event_attr_inst_retired.attr.attr,
582 &armv7_event_attr_exc_taken.attr.attr,
583 &armv7_event_attr_exc_return.attr.attr,
584 &armv7_event_attr_cid_write_retired.attr.attr,
585 &armv7_event_attr_pc_write_retired.attr.attr,
586 &armv7_event_attr_br_immed_retired.attr.attr,
587 &armv7_event_attr_br_return_retired.attr.attr,
588 &armv7_event_attr_unaligned_ldst_retired.attr.attr,
589 &armv7_event_attr_br_mis_pred.attr.attr,
590 &armv7_event_attr_cpu_cycles.attr.attr,
591 &armv7_event_attr_br_pred.attr.attr,
592 NULL,
593};
594
595static struct attribute_group armv7_pmuv1_events_attr_group = {
596 .name = "events",
597 .attrs = armv7_pmuv1_event_attrs,
598};
599
600ARMV7_EVENT_ATTR(mem_access, ARMV7_PERFCTR_MEM_ACCESS);
601ARMV7_EVENT_ATTR(l1i_cache, ARMV7_PERFCTR_L1_ICACHE_ACCESS);
602ARMV7_EVENT_ATTR(l1d_cache_wb, ARMV7_PERFCTR_L1_DCACHE_WB);
603ARMV7_EVENT_ATTR(l2d_cache, ARMV7_PERFCTR_L2_CACHE_ACCESS);
604ARMV7_EVENT_ATTR(l2d_cache_refill, ARMV7_PERFCTR_L2_CACHE_REFILL);
605ARMV7_EVENT_ATTR(l2d_cache_wb, ARMV7_PERFCTR_L2_CACHE_WB);
606ARMV7_EVENT_ATTR(bus_access, ARMV7_PERFCTR_BUS_ACCESS);
607ARMV7_EVENT_ATTR(memory_error, ARMV7_PERFCTR_MEM_ERROR);
608ARMV7_EVENT_ATTR(inst_spec, ARMV7_PERFCTR_INSTR_SPEC);
609ARMV7_EVENT_ATTR(ttbr_write_retired, ARMV7_PERFCTR_TTBR_WRITE);
610ARMV7_EVENT_ATTR(bus_cycles, ARMV7_PERFCTR_BUS_CYCLES);
611
612static struct attribute *armv7_pmuv2_event_attrs[] = {
613 &armv7_event_attr_sw_incr.attr.attr,
614 &armv7_event_attr_l1i_cache_refill.attr.attr,
615 &armv7_event_attr_l1i_tlb_refill.attr.attr,
616 &armv7_event_attr_l1d_cache_refill.attr.attr,
617 &armv7_event_attr_l1d_cache.attr.attr,
618 &armv7_event_attr_l1d_tlb_refill.attr.attr,
619 &armv7_event_attr_ld_retired.attr.attr,
620 &armv7_event_attr_st_retired.attr.attr,
621 &armv7_event_attr_inst_retired.attr.attr,
622 &armv7_event_attr_exc_taken.attr.attr,
623 &armv7_event_attr_exc_return.attr.attr,
624 &armv7_event_attr_cid_write_retired.attr.attr,
625 &armv7_event_attr_pc_write_retired.attr.attr,
626 &armv7_event_attr_br_immed_retired.attr.attr,
627 &armv7_event_attr_br_return_retired.attr.attr,
628 &armv7_event_attr_unaligned_ldst_retired.attr.attr,
629 &armv7_event_attr_br_mis_pred.attr.attr,
630 &armv7_event_attr_cpu_cycles.attr.attr,
631 &armv7_event_attr_br_pred.attr.attr,
632 &armv7_event_attr_mem_access.attr.attr,
633 &armv7_event_attr_l1i_cache.attr.attr,
634 &armv7_event_attr_l1d_cache_wb.attr.attr,
635 &armv7_event_attr_l2d_cache.attr.attr,
636 &armv7_event_attr_l2d_cache_refill.attr.attr,
637 &armv7_event_attr_l2d_cache_wb.attr.attr,
638 &armv7_event_attr_bus_access.attr.attr,
639 &armv7_event_attr_memory_error.attr.attr,
640 &armv7_event_attr_inst_spec.attr.attr,
641 &armv7_event_attr_ttbr_write_retired.attr.attr,
642 &armv7_event_attr_bus_cycles.attr.attr,
643 NULL,
644};
645
646static struct attribute_group armv7_pmuv2_events_attr_group = {
647 .name = "events",
648 .attrs = armv7_pmuv2_event_attrs,
649};
650
651
652
653
654#define ARMV7_IDX_CYCLE_COUNTER 0
655#define ARMV7_IDX_COUNTER0 1
656#define ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
657 (ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
658
659#define ARMV7_MAX_COUNTERS 32
660#define ARMV7_COUNTER_MASK (ARMV7_MAX_COUNTERS - 1)
661
662
663
664
665
666
667
668
669#define ARMV7_IDX_TO_COUNTER(x) \
670 (((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
671
672
673
674
675#define ARMV7_PMNC_E (1 << 0)
676#define ARMV7_PMNC_P (1 << 1)
677#define ARMV7_PMNC_C (1 << 2)
678#define ARMV7_PMNC_D (1 << 3)
679#define ARMV7_PMNC_X (1 << 4)
680#define ARMV7_PMNC_DP (1 << 5)
681#define ARMV7_PMNC_N_SHIFT 11
682#define ARMV7_PMNC_N_MASK 0x1f
683#define ARMV7_PMNC_MASK 0x3f
684
685
686
687
688#define ARMV7_FLAG_MASK 0xffffffff
689#define ARMV7_OVERFLOWED_MASK ARMV7_FLAG_MASK
690
691
692
693
694#define ARMV7_EVTYPE_MASK 0xc80000ff
695#define ARMV7_EVTYPE_EVENT 0xff
696
697
698
699
700#define ARMV7_EXCLUDE_PL1 BIT(31)
701#define ARMV7_EXCLUDE_USER BIT(30)
702#define ARMV7_INCLUDE_HYP BIT(27)
703
704
705
706
707#define ARMV7_SDER_SUNIDEN BIT(1)
708
709static inline u32 armv7_pmnc_read(void)
710{
711 u32 val;
712 asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
713 return val;
714}
715
716static inline void armv7_pmnc_write(u32 val)
717{
718 val &= ARMV7_PMNC_MASK;
719 isb();
720 asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
721}
722
723static inline int armv7_pmnc_has_overflowed(u32 pmnc)
724{
725 return pmnc & ARMV7_OVERFLOWED_MASK;
726}
727
728static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
729{
730 return idx >= ARMV7_IDX_CYCLE_COUNTER &&
731 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
732}
733
734static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
735{
736 return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
737}
738
739static inline void armv7_pmnc_select_counter(int idx)
740{
741 u32 counter = ARMV7_IDX_TO_COUNTER(idx);
742 asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
743 isb();
744}
745
746static inline u64 armv7pmu_read_counter(struct perf_event *event)
747{
748 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
749 struct hw_perf_event *hwc = &event->hw;
750 int idx = hwc->idx;
751 u32 value = 0;
752
753 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
754 pr_err("CPU%u reading wrong counter %d\n",
755 smp_processor_id(), idx);
756 } else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
757 asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
758 } else {
759 armv7_pmnc_select_counter(idx);
760 asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
761 }
762
763 return value;
764}
765
766static inline void armv7pmu_write_counter(struct perf_event *event, u64 value)
767{
768 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
769 struct hw_perf_event *hwc = &event->hw;
770 int idx = hwc->idx;
771
772 if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
773 pr_err("CPU%u writing wrong counter %d\n",
774 smp_processor_id(), idx);
775 } else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
776 asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" ((u32)value));
777 } else {
778 armv7_pmnc_select_counter(idx);
779 asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" ((u32)value));
780 }
781}
782
783static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
784{
785 armv7_pmnc_select_counter(idx);
786 val &= ARMV7_EVTYPE_MASK;
787 asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
788}
789
790static inline void armv7_pmnc_enable_counter(int idx)
791{
792 u32 counter = ARMV7_IDX_TO_COUNTER(idx);
793 asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
794}
795
796static inline void armv7_pmnc_disable_counter(int idx)
797{
798 u32 counter = ARMV7_IDX_TO_COUNTER(idx);
799 asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
800}
801
802static inline void armv7_pmnc_enable_intens(int idx)
803{
804 u32 counter = ARMV7_IDX_TO_COUNTER(idx);
805 asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
806}
807
808static inline void armv7_pmnc_disable_intens(int idx)
809{
810 u32 counter = ARMV7_IDX_TO_COUNTER(idx);
811 asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
812 isb();
813
814 asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
815 isb();
816}
817
818static inline u32 armv7_pmnc_getreset_flags(void)
819{
820 u32 val;
821
822
823 asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
824
825
826 val &= ARMV7_FLAG_MASK;
827 asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));
828
829 return val;
830}
831
832#ifdef DEBUG
833static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
834{
835 u32 val;
836 unsigned int cnt;
837
838 pr_info("PMNC registers dump:\n");
839
840 asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
841 pr_info("PMNC =0x%08x\n", val);
842
843 asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
844 pr_info("CNTENS=0x%08x\n", val);
845
846 asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
847 pr_info("INTENS=0x%08x\n", val);
848
849 asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
850 pr_info("FLAGS =0x%08x\n", val);
851
852 asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
853 pr_info("SELECT=0x%08x\n", val);
854
855 asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
856 pr_info("CCNT =0x%08x\n", val);
857
858 for (cnt = ARMV7_IDX_COUNTER0;
859 cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
860 armv7_pmnc_select_counter(cnt);
861 asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
862 pr_info("CNT[%d] count =0x%08x\n",
863 ARMV7_IDX_TO_COUNTER(cnt), val);
864 asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
865 pr_info("CNT[%d] evtsel=0x%08x\n",
866 ARMV7_IDX_TO_COUNTER(cnt), val);
867 }
868}
869#endif
870
/*
 * Enable counter and interrupt, and set the counter to count
 * the event that we're interested in.
 */
static void armv7pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * The per-CPU lock serialises all read-modify-write of the PMU
	 * control registers against the IRQ handler and other callers.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter while we reprogram it.
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 * We only need to set the event for the cycle counter if we
	 * have the ability to perform event filtering.
	 */
	if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
916
/* Stop a counter and mask off its overflow interrupt. */
static void armv7pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
948
/*
 * PMU overflow interrupt handler: one IRQ is shared by all counters,
 * so every active counter's overflow flag must be inspected.
 */
static irqreturn_t armv7pmu_handle_irq(struct arm_pmu *cpu_pmu)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1009
/* Globally enable the PMU by setting PMCR.E under the per-CPU lock. */
static void armv7pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1020
/* Globally disable the PMU by clearing PMCR.E under the per-CPU lock. */
static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1031
1032static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
1033 struct perf_event *event)
1034{
1035 int idx;
1036 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
1037 struct hw_perf_event *hwc = &event->hw;
1038 unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;
1039
1040
1041 if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
1042 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
1043 return -EAGAIN;
1044
1045 return ARMV7_IDX_CYCLE_COUNTER;
1046 }
1047
1048
1049
1050
1051
1052 for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
1053 if (!test_and_set_bit(idx, cpuc->used_mask))
1054 return idx;
1055 }
1056
1057
1058 return -EAGAIN;
1059}
1060
/* Release the counter previously claimed for @event. */
static void armv7pmu_clear_event_idx(struct pmu_hw_events *cpuc,
				     struct perf_event *event)
{
	clear_bit(event->hw.idx, cpuc->used_mask);
}
1066
1067
1068
1069
1070static int armv7pmu_set_event_filter(struct hw_perf_event *event,
1071 struct perf_event_attr *attr)
1072{
1073 unsigned long config_base = 0;
1074
1075 if (attr->exclude_idle)
1076 return -EPERM;
1077 if (attr->exclude_user)
1078 config_base |= ARMV7_EXCLUDE_USER;
1079 if (attr->exclude_kernel)
1080 config_base |= ARMV7_EXCLUDE_PL1;
1081 if (!attr->exclude_hv)
1082 config_base |= ARMV7_INCLUDE_HYP;
1083
1084
1085
1086
1087
1088 event->config_base = config_base;
1089
1090 return 0;
1091}
1092
/* Put the PMU into a sane initial state on this CPU. */
static void armv7pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events, val;

	if (cpu_pmu->secure_access) {
		/* Allow non-invasive debug (counting) in Secure state (SDER) */
		asm volatile("mrc p15, 0, %0, c1, c1, 1" : "=r" (val));
		val |= ARMV7_SDER_SUNIDEN;
		asm volatile("mcr p15, 0, %0, c1, c1, 1" : : "r" (val));
	}

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_disable_counter(idx);
		armv7_pmnc_disable_intens(idx);
	}

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
1113
/* Map a generic perf event onto the Cortex-A8 event tables. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
1119
/* Map a generic perf event onto the Cortex-A9 event tables. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
1125
/* Map a generic perf event onto the Cortex-A5 event tables. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
1131
/* Map a generic perf event onto the Cortex-A15 event tables. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
1137
/* Map a generic perf event onto the Cortex-A7 event tables. */
static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}
1143
/* Map a generic perf event onto the Cortex-A12/A17 event tables. */
static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}
1149
/* Map a generic perf event onto the Krait tables (0xFFFFF keeps region/group bits). */
static int krait_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map,
				&krait_perf_cache_map, 0xFFFFF);
}
1155
/* Krait mapping for parts without a usable PC-write (branch) event. */
static int krait_map_event_no_branch(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map_no_branch,
				&krait_perf_cache_map, 0xFFFFF);
}
1161
/* Map a generic perf event onto the Scorpion tables (0xFFFFF keeps region/group bits). */
static int scorpion_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &scorpion_perf_map,
				&scorpion_perf_cache_map, 0xFFFFF);
}
1167
1168static void armv7pmu_init(struct arm_pmu *cpu_pmu)
1169{
1170 cpu_pmu->handle_irq = armv7pmu_handle_irq;
1171 cpu_pmu->enable = armv7pmu_enable_event;
1172 cpu_pmu->disable = armv7pmu_disable_event;
1173 cpu_pmu->read_counter = armv7pmu_read_counter;
1174 cpu_pmu->write_counter = armv7pmu_write_counter;
1175 cpu_pmu->get_event_idx = armv7pmu_get_event_idx;
1176 cpu_pmu->clear_event_idx = armv7pmu_clear_event_idx;
1177 cpu_pmu->start = armv7pmu_start;
1178 cpu_pmu->stop = armv7pmu_stop;
1179 cpu_pmu->reset = armv7pmu_reset;
1180};
1181
/* Read the number of available counters from PMCR.N on this CPU. */
static void armv7_read_num_pmnc_events(void *info)
{
	int *nb_cnt = info;

	/* Read the nb of CNTx counters supported from PMNC */
	*nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter */
	*nb_cnt += 1;
}
1192
/* Probe num_events by cross-calling onto a CPU this PMU supports. */
static int armv7_probe_num_events(struct arm_pmu *arm_pmu)
{
	return smp_call_function_any(&arm_pmu->supported_cpus,
				     armv7_read_num_pmnc_events,
				     &arm_pmu->num_events, 1);
}
1199
/* Set up a Cortex-A8 (PMUv1) instance and probe its counter count. */
static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a8";
	cpu_pmu->map_event = armv7_a8_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1211
/* Set up a Cortex-A9 (PMUv1) instance and probe its counter count. */
static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a9";
	cpu_pmu->map_event = armv7_a9_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1223
/* Set up a Cortex-A5 (PMUv1) instance and probe its counter count. */
static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a5";
	cpu_pmu->map_event = armv7_a5_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1235
/* Set up a Cortex-A15 (PMUv2, supports event filtering) instance. */
static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a15";
	cpu_pmu->map_event = armv7_a15_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1248
/* Set up a Cortex-A7 (PMUv2, supports event filtering) instance. */
static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a7";
	cpu_pmu->map_event = armv7_a7_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1261
/* Set up a Cortex-A12 (PMUv2, supports event filtering) instance. */
static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a12";
	cpu_pmu->map_event = armv7_a12_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1274
1275static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
1276{
1277 int ret = armv7_a12_pmu_init(cpu_pmu);
1278 cpu_pmu->name = "armv7_cortex_a17";
1279 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
1280 &armv7_pmuv2_events_attr_group;
1281 cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
1282 &armv7_pmu_format_attr_group;
1283 return ret;
1284}
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
/*
 * Krait/Scorpion implementation-specific event encoding packed into
 * config_base:
 *   [3:0]   group  - 8-bit slot within a PMRESR register
 *   [11:4]  code   - event code programmed into that slot
 *   [15:12] region - which PMRESR register the slot lives in
 *   bit 16  KRAIT_EVENT - CPU-side implementation event
 *   bit 17  VENUM_EVENT - Venum (VFP coprocessor) event
 */
#define KRAIT_EVENT (1 << 16)
#define VENUM_EVENT (2 << 16)
#define KRAIT_EVENT_MASK (KRAIT_EVENT | VENUM_EVENT)
#define PMRESRn_EN BIT(31)	/* enable bit of each PMRESR register */

#define EVENT_REGION(event) (((event) >> 12) & 0xf)
#define EVENT_GROUP(event) ((event) & 0xf)
#define EVENT_CODE(event) (((event) >> 4) & 0xff)
#define EVENT_VENUM(event) (!!(event & VENUM_EVENT))
#define EVENT_CPU(event) (!!(event & KRAIT_EVENT))
1329
/* Read Krait PMRESRn (n = 0..2) via CP15 op1=1, c9/c15. */
static u32 krait_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}

	return val;
}
1350
/* Write Krait PMRESRn (n = 0..2) via CP15 op1=1, c9/c15. */
static void krait_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}
}
1367
/*
 * Read the Venum VPMRESR via coprocessor 10 (the VFP coprocessor);
 * requires cp10/cp11 access enabled first (see venum_pre_pmresr()).
 */
static u32 venum_read_pmresr(void)
{
	u32 val;
	asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
	return val;
}
1374
/* Write the Venum VPMRESR; caller must hold cp10/cp11 access enabled. */
static void venum_write_pmresr(u32 val)
{
	asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
}
1379
/*
 * Temporarily grant PL1 access to cp10/cp11 and force VFP on so that
 * VPMRESR can be read/written, saving the previous state for
 * venum_post_pmresr().  Must run non-preemptibly: the saved state is
 * per-CPU.
 */
static void venum_pre_pmresr(u32 *venum_orig_val, u32 *fp_orig_val)
{
	u32 venum_new_val;
	u32 fp_new_val;

	BUG_ON(preemptible());
	/* CPACR Enable CP10 and CP11 access */
	*venum_orig_val = get_copro_access();
	venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
	set_copro_access(venum_new_val);

	/* Enable FPEXC */
	*fp_orig_val = fmrx(FPEXC);
	fp_new_val = *fp_orig_val | FPEXC_EN;
	fmxr(FPEXC, fp_new_val);
}
1396
/* Restore the FPEXC and coprocessor-access state saved by venum_pre_pmresr(). */
static void venum_post_pmresr(u32 venum_orig_val, u32 fp_orig_val)
{
	BUG_ON(preemptible());
	/* Restore FPEXC */
	fmxr(FPEXC, fp_orig_val);
	isb();
	/* Restore CPACR */
	set_copro_access(venum_orig_val);
}
1406
/*
 * Base event-selector value for group 0 of the given PMRESR region.
 * Callers must have validated region <= 2 (see krait_pmu_get_event_idx()).
 */
static u32 krait_get_pmresrn_event(unsigned int region)
{
	static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
					     KRAIT_PMRESR1_GROUP0,
					     KRAIT_PMRESR2_GROUP0 };
	return pmresrn_table[region];
}
1414
/*
 * Program counter @idx for a Krait implementation-specific event:
 * point the counter's evtsel at the chosen PMRESR group, then install
 * the event code into that group and set the region's enable bit.
 */
static void krait_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = KRAIT_VPMRESR0_GROUP0;
	else
		val = krait_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		krait_write_pmresrn(region, val);
	}
}
1455
1456static u32 clear_pmresrn_group(u32 val, int group)
1457{
1458 u32 mask;
1459 int group_shift;
1460
1461 group_shift = group * 8;
1462 mask = 0xff << group_shift;
1463 val &= ~mask;
1464
1465
1466 if (val & ~PMRESRn_EN)
1467 return val |= PMRESRn_EN;
1468
1469 return 0;
1470}
1471
/* Remove the event code for this config from its PMRESR/VPMRESR group. */
static void krait_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		krait_write_pmresrn(region, val);
	}
}
1492
/* Krait variant of armv7pmu_disable_event(): also tears down PMRESR state. */
static void krait_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1518
/*
 * Krait variant of armv7pmu_enable_event(): implementation-specific
 * events are routed through the PMRESR registers via krait_evt_setup().
 */
static void krait_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We set the event for the cycle counter because we
	 * have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_evt_setup(idx, hwc->config_base);
	else
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1554
/* Krait reset: common ARMv7 reset plus clearing all PMRESR/VPMRESR state. */
static void krait_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	krait_write_pmresrn(0, 0);
	krait_write_pmresrn(1, 0);
	krait_write_pmresrn(2, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset the per-counter control register (PMxEVCNTCR) to a sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}

}
1579
/*
 * Map a (region, group) pair to a bit in cpuc->used_mask, placed above
 * the bits used for real counters.  Because each PMRESR group can hold
 * only one event code at a time, claiming this bit serialises events
 * that would contend for the same group.
 */
static int krait_event_to_bit(struct perf_event *event, unsigned int region,
			      unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = KRAIT_VPMRESR0_GROUP0;
	else
		bit = krait_get_pmresrn_event(region);
	bit -= krait_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1601
1602
1603
1604
1605
/*
 * Krait counter allocation: validate implementation-specific encodings,
 * claim the PMRESR-group bit first, then fall through to the generic
 * counter allocator; release the group bit on failure.
 */
static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				   struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int code = EVENT_CODE(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 2)
			return -EINVAL;
		/* Venum event codes are limited to 5 bits */
		if (venum_event && (code & 0xe0))
			return -EINVAL;

		bit = krait_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1636
/* Release both the counter and, if claimed, the PMRESR-group bit. */
static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	armv7pmu_clear_event_idx(cpuc, event);
	if (venum_event || krait_event) {
		bit = krait_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1653
/*
 * Set up a Qualcomm Krait PMU instance; the DT "qcom,no-pc-write"
 * quirk selects an event map without the broken PC-write event.
 */
static int krait_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_krait";
	/* Some early versions of Krait don't support PC write events */
	if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
				  "qcom,no-pc-write"))
		cpu_pmu->map_event = krait_map_event_no_branch;
	else
		cpu_pmu->map_event = krait_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->reset = krait_pmu_reset;
	cpu_pmu->enable = krait_pmu_enable_event;
	cpu_pmu->disable = krait_pmu_disable_event;
	cpu_pmu->get_event_idx = krait_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
/* Read Scorpion LPM0-2/L2LPM (n = 0..3); note region 3 uses c15/c2. */
static u32 scorpion_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 0, %0, c15, c0, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 2, %0, c15, c0, 0" : "=r" (val));
		break;
	case 3:
		asm volatile("mrc p15, 3, %0, c15, c2, 0" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}

	return val;
}
1732
/* Write Scorpion LPM0-2/L2LPM (n = 0..3); note region 3 uses c15/c2. */
static void scorpion_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 0, %0, c15, c0, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c15, c0, 0" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 2, %0, c15, c0, 0" : : "r" (val));
		break;
	case 3:
		asm volatile("mcr p15, 3, %0, c15, c2, 0" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}
}
1752
/*
 * Base event-selector value for group 0 of the given Scorpion region.
 * Callers must have validated region <= 3 (see scorpion_pmu_get_event_idx()).
 */
static u32 scorpion_get_pmresrn_event(unsigned int region)
{
	static const u32 pmresrn_table[] = { SCORPION_LPM0_GROUP0,
					     SCORPION_LPM1_GROUP0,
					     SCORPION_LPM2_GROUP0,
					     SCORPION_L2LPM_GROUP0 };
	return pmresrn_table[region];
}
1761
/*
 * Program counter @idx for a Scorpion implementation-specific event:
 * point the counter's evtsel at the chosen LPM group, then install the
 * event code into that group and set the region's enable bit.
 */
static void scorpion_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = SCORPION_VLPM_GROUP0;
	else
		val = scorpion_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	/* Clear the per-counter control register (PMxEVCNTCR) for this counter */
	asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		scorpion_write_pmresrn(region, val);
	}
}
1804
/* Remove the event code for this config from its LPM/VLPM group. */
static void scorpion_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		scorpion_write_pmresrn(region, val);
	}
}
1825
/* Scorpion variant of armv7pmu_disable_event(): also tears down LPM state. */
static void scorpion_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1851
/*
 * Scorpion variant of armv7pmu_enable_event(): implementation-specific
 * events go through the LPM registers via scorpion_evt_setup().  Unlike
 * Krait, Scorpion has no event filtering, so the cycle counter's evtsel
 * is never written.
 */
static void scorpion_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't set the event for the cycle counter because we
	 * don't have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_evt_setup(idx, hwc->config_base);
	else if (idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1887
/* Scorpion reset: common ARMv7 reset plus clearing all LPM/VLPM state. */
static void scorpion_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	scorpion_write_pmresrn(0, 0);
	scorpion_write_pmresrn(1, 0);
	scorpion_write_pmresrn(2, 0);
	scorpion_write_pmresrn(3, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset the per-counter control register (PMxEVCNTCR) to a sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}
1912
/*
 * Map a (region, group) pair to a bit in cpuc->used_mask, placed above
 * the bits used for real counters, serialising events that would
 * contend for the same LPM/VLPM group.
 */
static int scorpion_event_to_bit(struct perf_event *event, unsigned int region,
				 unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = SCORPION_VLPM_GROUP0;
	else
		bit = scorpion_get_pmresrn_event(region);
	bit -= scorpion_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1934
1935
1936
1937
1938
/*
 * Scorpion counter allocation: validate implementation-specific
 * encodings, claim the LPM-group bit first, then fall through to the
 * generic counter allocator; release the group bit on failure.
 */
static int scorpion_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 3)
			return -EINVAL;

		bit = scorpion_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1966
/* Release both the counter and, if claimed, the LPM-group bit. */
static void scorpion_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
					 struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	armv7pmu_clear_event_idx(cpuc, event);
	if (venum_event || scorpion_event) {
		bit = scorpion_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1983
/* Set up a Qualcomm Scorpion (uniprocessor) PMU instance. */
static int scorpion_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_scorpion";
	cpu_pmu->map_event = scorpion_map_event;
	cpu_pmu->reset = scorpion_pmu_reset;
	cpu_pmu->enable = scorpion_pmu_enable_event;
	cpu_pmu->disable = scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1996
1997static int scorpion_mp_pmu_init(struct arm_pmu *cpu_pmu)
1998{
1999 armv7pmu_init(cpu_pmu);
2000 cpu_pmu->name = "armv7_scorpion_mp";
2001 cpu_pmu->map_event = scorpion_map_event;
2002 cpu_pmu->reset = scorpion_pmu_reset;
2003 cpu_pmu->enable = scorpion_pmu_enable_event;
2004 cpu_pmu->disable = scorpion_pmu_disable_event;
2005 cpu_pmu->get_event_idx = scorpion_pmu_get_event_idx;
2006 cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
2007 return armv7_probe_num_events(cpu_pmu);
2008}
2009
/* Device-tree compatible strings mapped to their per-CPU init functions. */
static const struct of_device_id armv7_pmu_of_device_ids[] = {
	{.compatible = "arm,cortex-a17-pmu", .data = armv7_a17_pmu_init},
	{.compatible = "arm,cortex-a15-pmu", .data = armv7_a15_pmu_init},
	{.compatible = "arm,cortex-a12-pmu", .data = armv7_a12_pmu_init},
	{.compatible = "arm,cortex-a9-pmu", .data = armv7_a9_pmu_init},
	{.compatible = "arm,cortex-a8-pmu", .data = armv7_a8_pmu_init},
	{.compatible = "arm,cortex-a7-pmu", .data = armv7_a7_pmu_init},
	{.compatible = "arm,cortex-a5-pmu", .data = armv7_a5_pmu_init},
	{.compatible = "qcom,krait-pmu", .data = krait_pmu_init},
	{.compatible = "qcom,scorpion-pmu", .data = scorpion_pmu_init},
	{.compatible = "qcom,scorpion-mp-pmu", .data = scorpion_mp_pmu_init},
	{},
};
2023
/* CPUID-based fallback probing for platforms without a DT PMU node. */
static const struct pmu_probe_info armv7_pmu_probe_table[] = {
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A8, armv7_a8_pmu_init),
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A9, armv7_a9_pmu_init),
	{ }
};
2029
2030
/* Platform-device probe: dispatch via DT match table or CPUID fallback. */
static int armv7_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv7_pmu_of_device_ids,
				    armv7_pmu_probe_table);
}
2036
/* Built-in platform driver; bind attrs suppressed as rebinding a PMU is unsafe. */
static struct platform_driver armv7_pmu_driver = {
	.driver		= {
		.name	= "armv7-pmu",
		.of_match_table = armv7_pmu_of_device_ids,
		.suppress_bind_attrs = true,
	},
	.probe		= armv7_pmu_device_probe,
};

builtin_platform_driver(armv7_pmu_driver);
2047#endif
2048