1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19#ifdef CONFIG_CPU_V7
20
21#include <asm/cp15.h>
22#include <asm/cputype.h>
23#include <asm/irq_regs.h>
24#include <asm/vfp.h>
25#include "../vfp/vfpinstr.h"
26
27#include <linux/of.h>
28#include <linux/perf/arm_pmu.h>
29#include <linux/platform_device.h>
30
31
32
33
34
35
36
37
/*
 * Common ARMv7 event types.
 *
 * Note: an implementation may not be able to count all of these events
 * but the encodings are considered to be "reserved" in the case that
 * they are not available.
 */
#define ARMV7_PERFCTR_PMNC_SW_INCR 0x00
#define ARMV7_PERFCTR_L1_ICACHE_REFILL 0x01
#define ARMV7_PERFCTR_ITLB_REFILL 0x02
#define ARMV7_PERFCTR_L1_DCACHE_REFILL 0x03
#define ARMV7_PERFCTR_L1_DCACHE_ACCESS 0x04
#define ARMV7_PERFCTR_DTLB_REFILL 0x05
#define ARMV7_PERFCTR_MEM_READ 0x06
#define ARMV7_PERFCTR_MEM_WRITE 0x07
#define ARMV7_PERFCTR_INSTR_EXECUTED 0x08
#define ARMV7_PERFCTR_EXC_TAKEN 0x09
#define ARMV7_PERFCTR_EXC_EXECUTED 0x0A
#define ARMV7_PERFCTR_CID_WRITE 0x0B

/*
 * ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
 * It counts:
 *  - all (taken) branch instructions,
 *  - instructions that explicitly write the PC,
 *  - exception generating instructions.
 */
#define ARMV7_PERFCTR_PC_WRITE 0x0C
#define ARMV7_PERFCTR_PC_IMM_BRANCH 0x0D
#define ARMV7_PERFCTR_PC_PROC_RETURN 0x0E
#define ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS 0x0F
#define ARMV7_PERFCTR_PC_BRANCH_MIS_PRED 0x10
#define ARMV7_PERFCTR_CLOCK_CYCLES 0x11
#define ARMV7_PERFCTR_PC_BRANCH_PRED 0x12

/* These events are defined by the PMUv2 part of the ARMv7 PMU
 * architecture and may not exist on PMUv1 implementations. */
#define ARMV7_PERFCTR_MEM_ACCESS 0x13
#define ARMV7_PERFCTR_L1_ICACHE_ACCESS 0x14
#define ARMV7_PERFCTR_L1_DCACHE_WB 0x15
#define ARMV7_PERFCTR_L2_CACHE_ACCESS 0x16
#define ARMV7_PERFCTR_L2_CACHE_REFILL 0x17
#define ARMV7_PERFCTR_L2_CACHE_WB 0x18
#define ARMV7_PERFCTR_BUS_ACCESS 0x19
#define ARMV7_PERFCTR_MEM_ERROR 0x1A
#define ARMV7_PERFCTR_INSTR_SPEC 0x1B
#define ARMV7_PERFCTR_TTBR_WRITE 0x1C
#define ARMV7_PERFCTR_BUS_CYCLES 0x1D

/* Software-defined pseudo event: select the dedicated cycle counter. */
#define ARMV7_PERFCTR_CPU_CYCLES 0xFF
80
81
/* ARMv7 Cortex-A8 specific event types */
#define ARMV7_A8_PERFCTR_L2_CACHE_ACCESS 0x43
#define ARMV7_A8_PERFCTR_L2_CACHE_REFILL 0x44
#define ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS 0x50
#define ARMV7_A8_PERFCTR_STALL_ISIDE 0x56

/* ARMv7 Cortex-A9 specific event types */
#define ARMV7_A9_PERFCTR_INSTR_CORE_RENAME 0x68
#define ARMV7_A9_PERFCTR_STALL_ICACHE 0x60
#define ARMV7_A9_PERFCTR_STALL_DISPATCH 0x66

/* ARMv7 Cortex-A5 specific event types */
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL 0xc2
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP 0xc3

/* ARMv7 Cortex-A15 specific event types */
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ 0x42
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE 0x43

#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ 0x4C
#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE 0x4D

#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ 0x50
#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ 0x52
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE 0x53

#define ARMV7_A15_PERFCTR_PC_WRITE_SPEC 0x76

/* ARMv7 Cortex-A12 specific event types */
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41

#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ 0x50
#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51

#define ARMV7_A12_PERFCTR_PC_WRITE_SPEC 0x76

#define ARMV7_A12_PERFCTR_PF_TLB_REFILL 0xe7

/*
 * ARMv7 Krait specific event types.
 * The *_GROUP0 values appear to be base codes for group 0 of the
 * per-region event selection registers — TODO confirm against the
 * Krait event programming code elsewhere in this file.
 */
#define KRAIT_PMRESR0_GROUP0 0xcc
#define KRAIT_PMRESR1_GROUP0 0xd0
#define KRAIT_PMRESR2_GROUP0 0xd4
#define KRAIT_VPMRESR0_GROUP0 0xd8

/* Synthetic (driver-defined) Krait event encodings (> 8 bits wide). */
#define KRAIT_PERFCTR_L1_ICACHE_ACCESS 0x10011
#define KRAIT_PERFCTR_L1_ICACHE_MISS 0x10010

#define KRAIT_PERFCTR_L1_ITLB_ACCESS 0x12222
#define KRAIT_PERFCTR_L1_DTLB_ACCESS 0x12210

/* ARMv7 Scorpion specific event types; same scheme as Krait above. */
#define SCORPION_LPM0_GROUP0 0x4c
#define SCORPION_LPM1_GROUP0 0x50
#define SCORPION_LPM2_GROUP0 0x54
#define SCORPION_L2LPM_GROUP0 0x58
#define SCORPION_VLPM_GROUP0 0x5c

#define SCORPION_ICACHE_ACCESS 0x10053
#define SCORPION_ICACHE_MISS 0x10052

#define SCORPION_DTLB_ACCESS 0x12013
#define SCORPION_DTLB_MISS 0x12012

#define SCORPION_ITLB_MISS 0x12021
149
150
151
152
153
154
155
156
/*
 * Cortex-A8 HW events mapping: generic perf events -> A8 event codes.
 * Entries left at PERF_MAP_ALL_UNSUPPORTED are not available.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A8_PERFCTR_STALL_ISIDE,
};

/* Cortex-A8 generic cache events -> A8 event codes. */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best
	 * we can do: reads and writes map to the same combined event.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
202
203
204
205
/*
 * Cortex-A9 HW events mapping.  Note that "instructions" uses the A9
 * core-rename event rather than the architected INSTR_EXECUTED event.
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A9_PERFCTR_STALL_ICACHE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV7_A9_PERFCTR_STALL_DISPATCH,
};

/* Cortex-A9 generic cache events -> A9 event codes. */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best
	 * we can do: reads and writes map to the same combined event.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	/* Only the I-cache refill event is available, not accesses. */
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
246
247
248
249
/* Cortex-A5 HW events mapping. */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};

/* Cortex-A5 generic cache events -> A5 event codes. */
static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	/*
	 * The prefetch counters don't differentiate between the I side and
	 * the D side, so the same events are reused for the I-cache here.
	 */
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
292
293
294
295
/* Cortex-A15 HW events mapping. */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/*
 * Cortex-A15 generic cache events -> A15 event codes.  The A15 has
 * separate read/write events for L1D, L2 and DTLB, so those are mapped
 * per-operation instead of to the combined architected events.
 */
static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
341
342
343
344
/* Cortex-A7 HW events mapping. */
static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/* Cortex-A7 generic cache events -> architected event codes. */
static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best
	 * we can do: reads and writes map to the same combined event.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
390
391
392
393
/* Cortex-A12 HW events mapping. */
static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/*
 * Cortex-A12 generic cache events -> A12 event codes.  Read/write
 * accesses have dedicated events; refills use the combined architected
 * events.
 */
static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A12_PERFCTR_PF_TLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
440
441
442
443
/* Krait HW events mapping. */
static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/*
 * Variant map used when the PC-write (branch instructions) event is not
 * usable: identical to krait_perf_map but without
 * PERF_COUNT_HW_BRANCH_INSTRUCTIONS.
 */
static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/* Krait generic cache events -> architected or Krait-specific codes. */
static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best
	 * we can do: reads and writes map to the same combined event.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = KRAIT_PERFCTR_L1_ICACHE_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,

	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
490
491
492
493
/* Scorpion HW events mapping. */
static const unsigned scorpion_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/*
 * Scorpion generic cache events.  Reads and writes share the combined
 * architected L1D events; ITLB only has a miss event.
 */
static const unsigned scorpion_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ICACHE_MISS,
	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
533
/* Single "event" config field: bits 0-7 of perf_event_attr::config. */
PMU_FORMAT_ATTR(event, "config:0-7");

static struct attribute *armv7_pmu_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

/* sysfs "format" directory shared by all ARMv7 PMU variants. */
static struct attribute_group armv7_pmu_format_attr_group = {
	.name = "format",
	.attrs = armv7_pmu_format_attrs,
};
545
/*
 * Build "event=0xNN" strings for the sysfs events directory.  The extra
 * ARMV7_EVENT_ATTR_RESOLVE indirection forces the ARMV7_PERFCTR_*
 * argument to be macro-expanded before stringification, so the string
 * contains the hex value rather than the macro name.
 */
#define ARMV7_EVENT_ATTR_RESOLVE(m) #m
#define ARMV7_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR_STRING(name, armv7_event_attr_##name, \
			      "event=" ARMV7_EVENT_ATTR_RESOLVE(config))

/* Events common to all ARMv7 PMU implementations (PMUv1 subset). */
ARMV7_EVENT_ATTR(sw_incr, ARMV7_PERFCTR_PMNC_SW_INCR);
ARMV7_EVENT_ATTR(l1i_cache_refill, ARMV7_PERFCTR_L1_ICACHE_REFILL);
ARMV7_EVENT_ATTR(l1i_tlb_refill, ARMV7_PERFCTR_ITLB_REFILL);
ARMV7_EVENT_ATTR(l1d_cache_refill, ARMV7_PERFCTR_L1_DCACHE_REFILL);
ARMV7_EVENT_ATTR(l1d_cache, ARMV7_PERFCTR_L1_DCACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_tlb_refill, ARMV7_PERFCTR_DTLB_REFILL);
ARMV7_EVENT_ATTR(ld_retired, ARMV7_PERFCTR_MEM_READ);
ARMV7_EVENT_ATTR(st_retired, ARMV7_PERFCTR_MEM_WRITE);
ARMV7_EVENT_ATTR(inst_retired, ARMV7_PERFCTR_INSTR_EXECUTED);
ARMV7_EVENT_ATTR(exc_taken, ARMV7_PERFCTR_EXC_TAKEN);
ARMV7_EVENT_ATTR(exc_return, ARMV7_PERFCTR_EXC_EXECUTED);
ARMV7_EVENT_ATTR(cid_write_retired, ARMV7_PERFCTR_CID_WRITE);
ARMV7_EVENT_ATTR(pc_write_retired, ARMV7_PERFCTR_PC_WRITE);
ARMV7_EVENT_ATTR(br_immed_retired, ARMV7_PERFCTR_PC_IMM_BRANCH);
ARMV7_EVENT_ATTR(br_return_retired, ARMV7_PERFCTR_PC_PROC_RETURN);
ARMV7_EVENT_ATTR(unaligned_ldst_retired, ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV7_EVENT_ATTR(br_mis_pred, ARMV7_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV7_EVENT_ATTR(cpu_cycles, ARMV7_PERFCTR_CLOCK_CYCLES);
ARMV7_EVENT_ATTR(br_pred, ARMV7_PERFCTR_PC_BRANCH_PRED);
570
/* sysfs "events" directory entries for PMUv1 implementations. */
static struct attribute *armv7_pmuv1_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv1_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv1_event_attrs,
};
598
/* Additional events defined by the PMUv2 part of the architecture. */
ARMV7_EVENT_ATTR(mem_access, ARMV7_PERFCTR_MEM_ACCESS);
ARMV7_EVENT_ATTR(l1i_cache, ARMV7_PERFCTR_L1_ICACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_cache_wb, ARMV7_PERFCTR_L1_DCACHE_WB);
ARMV7_EVENT_ATTR(l2d_cache, ARMV7_PERFCTR_L2_CACHE_ACCESS);
ARMV7_EVENT_ATTR(l2d_cache_refill, ARMV7_PERFCTR_L2_CACHE_REFILL);
ARMV7_EVENT_ATTR(l2d_cache_wb, ARMV7_PERFCTR_L2_CACHE_WB);
ARMV7_EVENT_ATTR(bus_access, ARMV7_PERFCTR_BUS_ACCESS);
ARMV7_EVENT_ATTR(memory_error, ARMV7_PERFCTR_MEM_ERROR);
ARMV7_EVENT_ATTR(inst_spec, ARMV7_PERFCTR_INSTR_SPEC);
ARMV7_EVENT_ATTR(ttbr_write_retired, ARMV7_PERFCTR_TTBR_WRITE);
ARMV7_EVENT_ATTR(bus_cycles, ARMV7_PERFCTR_BUS_CYCLES);

/* sysfs "events" entries for PMUv2: the PMUv1 set plus the above. */
static struct attribute *armv7_pmuv2_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	&armv7_event_attr_mem_access.attr.attr,
	&armv7_event_attr_l1i_cache.attr.attr,
	&armv7_event_attr_l1d_cache_wb.attr.attr,
	&armv7_event_attr_l2d_cache.attr.attr,
	&armv7_event_attr_l2d_cache_refill.attr.attr,
	&armv7_event_attr_l2d_cache_wb.attr.attr,
	&armv7_event_attr_bus_access.attr.attr,
	&armv7_event_attr_memory_error.attr.attr,
	&armv7_event_attr_inst_spec.attr.attr,
	&armv7_event_attr_ttbr_write_retired.attr.attr,
	&armv7_event_attr_bus_cycles.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv2_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv2_event_attrs,
};
649
650
651
652
/*
 * Perf Events' indices: index 0 is the dedicated cycle counter,
 * ARMV7_IDX_COUNTER0..ARMV7_IDX_COUNTER_LAST are the programmable
 * event counters.
 */
#define ARMV7_IDX_CYCLE_COUNTER 0
#define ARMV7_IDX_COUNTER0 1
#define ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV7_MAX_COUNTERS 32
#define ARMV7_COUNTER_MASK (ARMV7_MAX_COUNTERS - 1)

/*
 * Convert a perf event index into the counter number used in the PMU
 * control registers: event counter N maps to bit N-1, and the cycle
 * counter (index 0) wraps around to bit 31 via the mask.
 */
#define ARMV7_IDX_TO_COUNTER(x) \
	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
670
671
672
673
/*
 * ARMv7 low level PMNC access.
 *
 * Per-CPU PMNC: config reg bits.
 */
#define ARMV7_PMNC_E (1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P (1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C (1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X (1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV7_PMNC_N_SHIFT 11 /* Number of counters supported */
#define ARMV7_PMNC_N_MASK 0x1f
#define ARMV7_PMNC_MASK 0x3f /* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg.
 */
#define ARMV7_FLAG_MASK 0xffffffff /* Mask for writable bits */
#define ARMV7_OVERFLOWED_MASK ARMV7_FLAG_MASK

/*
 * PMXEVTYPER: event selection reg.
 */
#define ARMV7_EVTYPE_MASK 0xc80000ff /* Mask for writable bits */
#define ARMV7_EVTYPE_EVENT 0xff /* Mask for EVENT bits */
695
696
697
698
/*
 * Event filters for PMUv2: these bits are ORed into the event type to
 * restrict counting by privilege level.  Use unsigned literals: shifting
 * 1 (a signed int) into bit 31 is undefined behaviour in C, so spell the
 * constants with 1U.  The resulting bit patterns are unchanged.
 */
#define ARMV7_EXCLUDE_PL1 (1U << 31)
#define ARMV7_EXCLUDE_USER (1U << 30)
#define ARMV7_INCLUDE_HYP (1U << 27)

/*
 * Secure debug enable reg.
 */
#define ARMV7_SDER_SUNIDEN BIT(1) /* Permit non-invasive debug */
707
/* Read the PMNC (PMCR) performance monitor control register. */
static inline u32 armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}
714
/*
 * Write the PMNC (PMCR) control register.  Only the architecturally
 * writable bits are kept; the isb() ensures earlier PMU programming has
 * completed before the new control value (e.g. the enable bit) takes
 * effect.
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
721
722static inline int armv7_pmnc_has_overflowed(u32 pmnc)
723{
724 return pmnc & ARMV7_OVERFLOWED_MASK;
725}
726
727static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
728{
729 return idx >= ARMV7_IDX_CYCLE_COUNTER &&
730 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
731}
732
733static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
734{
735 return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
736}
737
/*
 * Select counter @idx via PMSELR so that subsequent PMXEVTYPER /
 * PMXEVCNTR accesses target it.  The isb() guarantees the selection has
 * taken effect before the caller touches the banked registers.
 */
static inline void armv7_pmnc_select_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();
}
744
/*
 * Read the current value of @event's hardware counter: the cycle
 * counter via PMCCNTR, event counters via PMSELR + PMXEVCNTR.
 * Returns 0 (with an error message) for an invalid index.
 */
static inline u32 armv7pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	} else {
		armv7_pmnc_select_counter(idx);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
	}

	return value;
}
764
/*
 * Write @value to @event's hardware counter: the cycle counter via
 * PMCCNTR, event counters via PMSELR + PMXEVCNTR.  Invalid indices are
 * reported and ignored.
 */
static inline void armv7pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	} else {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
	}
}
781
/*
 * Program the event type for counter @idx: select it, then write the
 * masked event/filter value to PMXEVTYPER.
 */
static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	armv7_pmnc_select_counter(idx);
	val &= ARMV7_EVTYPE_MASK;
	asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
}
788
/* Enable counter @idx by setting its bit in CNTENSET. */
static inline void armv7_pmnc_enable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
}
794
/* Disable counter @idx by setting its bit in CNTENCLR. */
static inline void armv7_pmnc_disable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
}
800
/* Enable the overflow interrupt for counter @idx (INTENSET). */
static inline void armv7_pmnc_enable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
}
806
/*
 * Disable the overflow interrupt for counter @idx (INTENCLR), then
 * clear its overflow flag so a stale pending overflow cannot fire
 * later.  The isb()s order the two register writes.
 */
static inline void armv7_pmnc_disable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
	isb();
}
816
/*
 * Read the overflow flag status register and acknowledge the set flags
 * by writing them back.  Returns the flags as read.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
830
#ifdef DEBUG
/* Dump all PMU control and counter registers for debugging. */
static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
{
	u32 val;
	unsigned int cnt;

	pr_info("PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	pr_info("PMNC =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	pr_info("CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	pr_info("INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	pr_info("FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	pr_info("SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	pr_info("CCNT =0x%08x\n", val);

	/* Dump value and event selection for each programmable counter. */
	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		pr_info("CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		pr_info("CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
#endif
869
/*
 * Program and enable the hardware counter backing @event: set the event
 * type (for programmable counters), unmask its overflow interrupt and
 * enable it, all under the per-CPU PMU lock with interrupts disabled.
 */
static void armv7pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 * We only need to set the event for the cycle counter if we
	 * have the ability to perform event filtering.
	 */
	if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
915
/*
 * Disable the counter backing @event and mask its overflow interrupt.
 */
static void armv7pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable the overflow interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
947
/*
 * PMU overflow interrupt handler.  All counters share a single IRQ, so
 * each counter's overflow flag must be checked individually.
 */
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1009
/* Start this CPU's PMU: set the global enable bit in PMNC. */
static void armv7pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1020
/* Stop this CPU's PMU: clear the global enable bit in PMNC. */
static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1031
/*
 * Allocate a hardware counter index for @event.  The dedicated cycle
 * counter is used for CPU-cycle events; everything else takes the first
 * free programmable counter.  Returns the index or -EAGAIN if all
 * suitable counters are in use.
 */
static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV7_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
1060
1061
1062
1063
/*
 * Add an event filter to a given event.  Translates the generic perf
 * exclude_* attributes into the ARMv7 privilege-level exclusion bits
 * stored in the event's config_base.
 */
static int armv7pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	/* The hardware cannot filter out idle cycles */
	if (attr->exclude_idle)
		return -EPERM;
	if (attr->exclude_user)
		config_base |= ARMV7_EXCLUDE_USER;
	if (attr->exclude_kernel)
		config_base |= ARMV7_EXCLUDE_PL1;
	if (!attr->exclude_hv)
		config_base |= ARMV7_INCLUDE_HYP;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
1086
/*
 * Put the PMU into a sane initial state: disable every counter and its
 * interrupt, then reset all counter values.  Runs on the target CPU.
 */
static void armv7pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events, val;

	if (cpu_pmu->secure_access) {
		/* Allow non-invasive debug (counting) in secure state */
		asm volatile("mrc p15, 0, %0, c1, c1, 1" : "=r" (val));
		val |= ARMV7_SDER_SUNIDEN;
		asm volatile("mcr p15, 0, %0, c1, c1, 1" : : "r" (val));
	}

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_disable_counter(idx);
		armv7_pmnc_disable_intens(idx);
	}

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
1107
/* Map a generic perf event onto the Cortex-A8 event tables. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
1113
/* Map a generic perf event onto the Cortex-A9 event tables. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
1119
/* Map a generic perf event onto the Cortex-A5 event tables. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
1125
/* Map a generic perf event onto the Cortex-A15 event tables. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
1131
/* Map a generic perf event onto the Cortex-A7 event tables. */
static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}
1137
/* Map a generic perf event onto the Cortex-A12 event tables. */
static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}
1143
/*
 * Map a generic perf event onto the Krait event tables.  The wider
 * 0xFFFFF mask leaves room for the Krait region/group/code encoding.
 */
static int krait_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map,
				&krait_perf_cache_map, 0xFFFFF);
}
1149
/* Krait event mapping variant used when the "qcom,no-pc-write" quirk applies. */
static int krait_map_event_no_branch(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map_no_branch,
				&krait_perf_cache_map, 0xFFFFF);
}
1155
/* Map a generic perf event onto the Scorpion event tables. */
static int scorpion_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &scorpion_perf_map,
				&scorpion_perf_cache_map, 0xFFFFF);
}
1161
1162static void armv7pmu_init(struct arm_pmu *cpu_pmu)
1163{
1164 cpu_pmu->handle_irq = armv7pmu_handle_irq;
1165 cpu_pmu->enable = armv7pmu_enable_event;
1166 cpu_pmu->disable = armv7pmu_disable_event;
1167 cpu_pmu->read_counter = armv7pmu_read_counter;
1168 cpu_pmu->write_counter = armv7pmu_write_counter;
1169 cpu_pmu->get_event_idx = armv7pmu_get_event_idx;
1170 cpu_pmu->start = armv7pmu_start;
1171 cpu_pmu->stop = armv7pmu_stop;
1172 cpu_pmu->reset = armv7pmu_reset;
1173 cpu_pmu->max_period = (1LLU << 32) - 1;
1174};
1175
/*
 * Read how many counters this CPU's PMU implements.  Runs on the
 * target CPU via smp_call_function_any().
 */
static void armv7_read_num_pmnc_events(void *info)
{
	int *nb_cnt = info;

	/* Read the nb of CNTx counters supported from PMNC */
	*nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter */
	*nb_cnt += 1;
}
1186
/* Probe num_events on one of the CPUs this PMU supports. */
static int armv7_probe_num_events(struct arm_pmu *arm_pmu)
{
	return smp_call_function_any(&arm_pmu->supported_cpus,
				     armv7_read_num_pmnc_events,
				     &arm_pmu->num_events, 1);
}
1193
/* Cortex-A8 PMU setup: common ARMv7 callbacks plus A8 event mapping. */
static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a8";
	cpu_pmu->map_event	= armv7_a8_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1205
/* Cortex-A9 PMU setup: common ARMv7 callbacks plus A9 event mapping. */
static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a9";
	cpu_pmu->map_event	= armv7_a9_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1217
/* Cortex-A5 PMU setup: common ARMv7 callbacks plus A5 event mapping. */
static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a5";
	cpu_pmu->map_event	= armv7_a5_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1229
/*
 * Cortex-A15 PMU setup.  A15 is PMUv2 and supports privilege-level
 * event filtering, hence set_event_filter is installed.
 */
static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a15";
	cpu_pmu->map_event	= armv7_a15_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1242
/* Cortex-A7 PMU setup (PMUv2 with event filtering). */
static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a7";
	cpu_pmu->map_event	= armv7_a7_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1255
/* Cortex-A12 PMU setup (PMUv2 with event filtering). */
static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a12";
	cpu_pmu->map_event	= armv7_a12_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1268
/* Cortex-A17: identical PMU to Cortex-A12 apart from the name. */
static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
{
	int ret = armv7_a12_pmu_init(cpu_pmu);

	cpu_pmu->name = "armv7_cortex_a17";
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return ret;
}
1279
1280
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
/*
 * Qualcomm Krait/Scorpion implementation-defined event encoding.
 * Bits 16-17 flag the event as a CPU (KRAIT_EVENT) or VeNum
 * (VENUM_EVENT) implementation event; the region, code and group
 * fields below select the PMRESRn register, event code and group.
 */
#define KRAIT_EVENT		(1 << 16)
#define VENUM_EVENT		(2 << 16)
#define KRAIT_EVENT_MASK	(KRAIT_EVENT | VENUM_EVENT)
#define PMRESRn_EN		BIT(31)	/* Enable bit in PMRESRn registers */

#define EVENT_REGION(event)	(((event) >> 12) & 0xf)		/* R */
#define EVENT_GROUP(event)	((event) & 0xf)			/* G */
#define EVENT_CODE(event)	(((event) >> 4) & 0xff)		/* CC */
#define EVENT_VENUM(event)	(!!(event & VENUM_EVENT))	/* N=2 */
#define EVENT_CPU(event)	(!!(event & KRAIT_EVENT))	/* N=1 */
1323
/* Read Krait PMRESRn (n = 0..2); any other index is a driver bug. */
static u32 krait_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}

	return val;
}
1344
/* Write Krait PMRESRn (n = 0..2); any other index is a driver bug. */
static void krait_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}
}
1361
/*
 * Read the VeNum VPMRESR0 register (lives on coprocessor p10, so the
 * VFP/coprocessor access must be enabled first via venum_pre_pmresr()).
 */
static u32 venum_read_pmresr(void)
{
	u32 val;
	asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
	return val;
}
1368
/* Write VPMRESR0; caller must bracket with venum_pre/post_pmresr(). */
static void venum_write_pmresr(u32 val)
{
	asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
}
1373
/*
 * Grant access to the VeNum PMU register: enable kernel access to
 * coprocessors 10/11 and turn the FPU on, saving the previous state in
 * *venum_orig_val / *fp_orig_val so venum_post_pmresr() can restore it.
 * Must not be called with preemption enabled (per-CPU state).
 */
static void venum_pre_pmresr(u32 *venum_orig_val, u32 *fp_orig_val)
{
	u32 venum_new_val;
	u32 fp_new_val;

	BUG_ON(preemptible());
	/* CPACR Enable CP10 and CP11 access */
	*venum_orig_val = get_copro_access();
	venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
	set_copro_access(venum_new_val);

	/* Enable FPEXC */
	*fp_orig_val = fmrx(FPEXC);
	fp_new_val = *fp_orig_val | FPEXC_EN;
	fmxr(FPEXC, fp_new_val);
}
1390
/* Restore the FPEXC and coprocessor-access state saved by venum_pre_pmresr(). */
static void venum_post_pmresr(u32 venum_orig_val, u32 fp_orig_val)
{
	BUG_ON(preemptible());
	/* Restore FPEXC */
	fmxr(FPEXC, fp_orig_val);
	isb();
	/* Restore CPACR */
	set_copro_access(venum_orig_val);
}
1400
1401static u32 krait_get_pmresrn_event(unsigned int region)
1402{
1403 static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
1404 KRAIT_PMRESR1_GROUP0,
1405 KRAIT_PMRESR2_GROUP0 };
1406 return pmresrn_table[region];
1407}
1408
/*
 * Program counter @idx for a Krait implementation event: point the
 * counter's evtsel at the appropriate PMRESRn/VPMRESR0 group and write
 * the event code into that group's byte of the resource register.
 */
static void krait_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	/* Each group occupies one byte of the resource register */
	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = KRAIT_VPMRESR0_GROUP0;
	else
		val = krait_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		krait_write_pmresrn(region, val);
	}
}
1449
1450static u32 clear_pmresrn_group(u32 val, int group)
1451{
1452 u32 mask;
1453 int group_shift;
1454
1455 group_shift = group * 8;
1456 mask = 0xff << group_shift;
1457 val &= ~mask;
1458
1459
1460 if (val & ~PMRESRn_EN)
1461 return val |= PMRESRn_EN;
1462
1463 return 0;
1464}
1465
/*
 * Release the PMRESRn/VPMRESR0 group used by a Krait implementation
 * event, disabling the resource register if no group remains in use.
 */
static void krait_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		krait_write_pmresrn(region, val);
	}
}
1486
/*
 * Krait variant of armv7pmu_disable_event(): additionally releases the
 * PMRESR group when the event used the Krait/VeNum encoding.
 */
static void krait_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1512
/*
 * Krait variant of armv7pmu_enable_event(): implementation events are
 * routed through krait_evt_setup(), architected events use the normal
 * evtsel path.
 */
static void krait_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We set the event for the cycle counter because we
	 * have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_evt_setup(idx, hwc->config_base);
	else
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1548
/*
 * Krait PMU reset: common ARMv7 reset plus clearing the Krait- and
 * VeNum-specific resource registers and per-counter control registers.
 */
static void krait_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	krait_write_pmresrn(0, 0);
	krait_write_pmresrn(1, 0);
	krait_write_pmresrn(2, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}
1573
/*
 * Compute the used_mask bit that represents exclusive ownership of one
 * PMRESRn/VPMRESR0 group, so two events can't program the same group.
 */
static int krait_event_to_bit(struct perf_event *event, unsigned int region,
			      unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = KRAIT_VPMRESR0_GROUP0;
	else
		bit = krait_get_pmresrn_event(region);
	bit -= krait_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1595
1596
1597
1598
1599
/*
 * We check for column exclusion constraints here.
 * Two events cant use the same group within a pmresr register.
 */
static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				   struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int code = EVENT_CODE(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 2)
			return -EINVAL;
		if (venum_event && (code & 0xe0))
			return -EINVAL;

		/* Reserve the group before grabbing a counter */
		bit = krait_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	/* Roll back the group reservation if no counter was available */
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1630
/* Release the PMRESR group reservation taken in krait_pmu_get_event_idx(). */
static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		bit = krait_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1646
/*
 * Krait PMU setup: common ARMv7 callbacks overridden with the Krait
 * enable/disable/reset/index hooks that understand the PMRESR encoding.
 */
static int krait_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_krait";
	/* "qcom,no-pc-write" DT quirk: use the map without PC-write events */
	if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
				  "qcom,no-pc-write"))
		cpu_pmu->map_event = krait_map_event_no_branch;
	else
		cpu_pmu->map_event = krait_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->reset		= krait_pmu_reset;
	cpu_pmu->enable		= krait_pmu_enable_event;
	cpu_pmu->disable	= krait_pmu_disable_event;
	cpu_pmu->get_event_idx	= krait_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1665
1666
1667
1668
1669
1670
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
/* Read Scorpion LPMn/L2LPM (n = 0..3); any other index is a driver bug. */
static u32 scorpion_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 0, %0, c15, c0, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 2, %0, c15, c0, 0" : "=r" (val));
		break;
	case 3:
		asm volatile("mrc p15, 3, %0, c15, c2, 0" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}

	return val;
}
1725
/* Write Scorpion LPMn/L2LPM (n = 0..3); any other index is a driver bug. */
static void scorpion_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 0, %0, c15, c0, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c15, c0, 0" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 2, %0, c15, c0, 0" : : "r" (val));
		break;
	case 3:
		asm volatile("mcr p15, 3, %0, c15, c2, 0" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}
}
1745
/*
 * Map a Scorpion region number (0..3, validated by the caller) to the
 * base event encoding for group 0 of that region.
 */
static u32 scorpion_get_pmresrn_event(unsigned int region)
{
	static const u32 pmresrn_table[] = { SCORPION_LPM0_GROUP0,
					     SCORPION_LPM1_GROUP0,
					     SCORPION_LPM2_GROUP0,
					     SCORPION_L2LPM_GROUP0 };
	return pmresrn_table[region];
}
1754
/*
 * Program counter @idx for a Scorpion implementation event: point the
 * counter's evtsel at the appropriate LPMn/VLPM group and write the
 * event code into that group's byte of the resource register.
 */
static void scorpion_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	/* Each group occupies one byte of the resource register */
	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = SCORPION_VLPM_GROUP0;
	else
		val = scorpion_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	/* NOTE(review): presumably clears the selected counter's
	 * PMxEVCNTCR control register — confirm against Scorpion docs. */
	asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		scorpion_write_pmresrn(region, val);
	}
}
1797
/*
 * Release the LPMn/VLPM group used by a Scorpion implementation event,
 * disabling the resource register if no group remains in use.
 */
static void scorpion_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		scorpion_write_pmresrn(region, val);
	}
}
1818
/*
 * Scorpion variant of armv7pmu_disable_event(): additionally releases
 * the LPM group when the event used the implementation encoding.
 */
static void scorpion_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1844
/*
 * Scorpion variant of armv7pmu_enable_event(): implementation events
 * go through scorpion_evt_setup(); architected events use the normal
 * evtsel path, skipping the fixed-function cycle counter.
 */
static void scorpion_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't set the event for the cycle counter because we
	 * don't have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_evt_setup(idx, hwc->config_base);
	else if (idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1880
/*
 * Scorpion PMU reset: common ARMv7 reset plus clearing the LPMn/VLPM
 * resource registers and per-counter control registers.
 */
static void scorpion_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	scorpion_write_pmresrn(0, 0);
	scorpion_write_pmresrn(1, 0);
	scorpion_write_pmresrn(2, 0);
	scorpion_write_pmresrn(3, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}
1905
/*
 * Compute the used_mask bit that represents exclusive ownership of one
 * LPMn/VLPM group, so two events can't program the same group.
 */
static int scorpion_event_to_bit(struct perf_event *event, unsigned int region,
				 unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = SCORPION_VLPM_GROUP0;
	else
		bit = scorpion_get_pmresrn_event(region);
	bit -= scorpion_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1927
1928
1929
1930
1931
/*
 * We check for column exclusion constraints here.
 * Two events cant use the same group within a pmresr register.
 */
static int scorpion_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 3)
			return -EINVAL;

		/* Reserve the group before grabbing a counter */
		bit = scorpion_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	/* Roll back the group reservation if no counter was available */
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1959
/* Release the LPM group reservation taken in scorpion_pmu_get_event_idx(). */
static void scorpion_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
					 struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		bit = scorpion_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1975
/* Scorpion (uniprocessor) PMU setup with the Scorpion-specific hooks. */
static int scorpion_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1988
/* Scorpion MP PMU setup: same hooks as uniprocessor, different name. */
static int scorpion_mp_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion_mp";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
2001
/* Devicetree match table: compatible string -> per-CPU init function. */
static const struct of_device_id armv7_pmu_of_device_ids[] = {
	{.compatible = "arm,cortex-a17-pmu",	.data = armv7_a17_pmu_init},
	{.compatible = "arm,cortex-a15-pmu",	.data = armv7_a15_pmu_init},
	{.compatible = "arm,cortex-a12-pmu",	.data = armv7_a12_pmu_init},
	{.compatible = "arm,cortex-a9-pmu",	.data = armv7_a9_pmu_init},
	{.compatible = "arm,cortex-a8-pmu",	.data = armv7_a8_pmu_init},
	{.compatible = "arm,cortex-a7-pmu",	.data = armv7_a7_pmu_init},
	{.compatible = "arm,cortex-a5-pmu",	.data = armv7_a5_pmu_init},
	{.compatible = "qcom,krait-pmu",	.data = krait_pmu_init},
	{.compatible = "qcom,scorpion-pmu",	.data = scorpion_pmu_init},
	{.compatible = "qcom,scorpion-mp-pmu",	.data = scorpion_mp_pmu_init},
	{},
};
2015
/* CPUID-based fallback probing for platforms without a devicetree entry. */
static const struct pmu_probe_info armv7_pmu_probe_table[] = {
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A8, armv7_a8_pmu_init),
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A9, armv7_a9_pmu_init),
	{ /* sentinel */ }
};
2021
2022
/* Platform-driver probe: hand off to the generic arm_pmu framework. */
static int armv7_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv7_pmu_of_device_ids,
				    armv7_pmu_probe_table);
}
2028
/* Platform driver matched against the devicetree table above. */
static struct platform_driver armv7_pmu_driver = {
	.driver		= {
		.name	= "armv7-pmu",
		.of_match_table = armv7_pmu_of_device_ids,
	},
	.probe		= armv7_pmu_device_probe,
};
2036
/* Register the driver at device_initcall time. */
static int __init register_armv7_pmu_driver(void)
{
	return platform_driver_register(&armv7_pmu_driver);
}
device_initcall(register_armv7_pmu_driver);
2042#endif
2043