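/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 * a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 * a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ'ed on individual
 * events. There is a single interrupt per event group.
 */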
#ifdef CONFIG_CPU_V7

#include <asm/cp15.h>
#include <asm/cputype.h>
#include <asm/irq_regs.h>
#include <asm/vfp.h>
#include "../vfp/vfpinstr.h"

#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>
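
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered architectural behaviours and
 * therefore available to any implementation.
 */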
#define ARMV7_PERFCTR_PMNC_SW_INCR			0x00
#define ARMV7_PERFCTR_L1_ICACHE_REFILL			0x01
#define ARMV7_PERFCTR_ITLB_REFILL			0x02
#define ARMV7_PERFCTR_L1_DCACHE_REFILL			0x03
#define ARMV7_PERFCTR_L1_DCACHE_ACCESS			0x04
#define ARMV7_PERFCTR_DTLB_REFILL			0x05
#define ARMV7_PERFCTR_MEM_READ				0x06
#define ARMV7_PERFCTR_MEM_WRITE				0x07
#define ARMV7_PERFCTR_INSTR_EXECUTED			0x08
#define ARMV7_PERFCTR_EXC_TAKEN				0x09
#define ARMV7_PERFCTR_EXC_EXECUTED			0x0A
#define ARMV7_PERFCTR_CID_WRITE				0x0B
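
/*
 * ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
 * It counts:
 *  - all (taken) branch instructions,
 *  - instructions that explicitly write the PC,
 *  - exception generating instructions.
 */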
#define ARMV7_PERFCTR_PC_WRITE				0x0C
#define ARMV7_PERFCTR_PC_IMM_BRANCH			0x0D
#define ARMV7_PERFCTR_PC_PROC_RETURN			0x0E
#define ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS		0x0F
#define ARMV7_PERFCTR_PC_BRANCH_MIS_PRED		0x10
#define ARMV7_PERFCTR_CLOCK_CYCLES			0x11
#define ARMV7_PERFCTR_PC_BRANCH_PRED			0x12
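
/* These events are defined by the PMv2 supplement (ARM DDI 0457A). */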
#define ARMV7_PERFCTR_MEM_ACCESS			0x13
#define ARMV7_PERFCTR_L1_ICACHE_ACCESS			0x14
#define ARMV7_PERFCTR_L1_DCACHE_WB			0x15
#define ARMV7_PERFCTR_L2_CACHE_ACCESS			0x16
#define ARMV7_PERFCTR_L2_CACHE_REFILL			0x17
#define ARMV7_PERFCTR_L2_CACHE_WB			0x18
#define ARMV7_PERFCTR_BUS_ACCESS			0x19
#define ARMV7_PERFCTR_MEM_ERROR				0x1A
#define ARMV7_PERFCTR_INSTR_SPEC			0x1B
#define ARMV7_PERFCTR_TTBR_WRITE			0x1C
#define ARMV7_PERFCTR_BUS_CYCLES			0x1D

#define ARMV7_PERFCTR_CPU_CYCLES			0xFF
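
/* ARMv7 Cortex-A8 specific event types */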
#define ARMV7_A8_PERFCTR_L2_CACHE_ACCESS		0x43
#define ARMV7_A8_PERFCTR_L2_CACHE_REFILL		0x44
#define ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS		0x50
#define ARMV7_A8_PERFCTR_STALL_ISIDE			0x56
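
/* ARMv7 Cortex-A9 specific event types */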
#define ARMV7_A9_PERFCTR_INSTR_CORE_RENAME		0x68
#define ARMV7_A9_PERFCTR_STALL_ICACHE			0x60
#define ARMV7_A9_PERFCTR_STALL_DISPATCH			0x66
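
/* ARMv7 Cortex-A5 specific event types */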
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL		0xc2
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP		0xc3
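
/* ARMv7 Cortex-A15 specific event types */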
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ		0x40
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE	0x41
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ		0x42
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE	0x43

#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ		0x4C
#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE		0x4D

#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ		0x50
#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE		0x51
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ		0x52
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE		0x53

#define ARMV7_A15_PERFCTR_PC_WRITE_SPEC			0x76
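
/* ARMv7 Cortex-A12 specific event types */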
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ		0x40
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE	0x41

#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ		0x50
#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE		0x51

#define ARMV7_A12_PERFCTR_PC_WRITE_SPEC			0x76

#define ARMV7_A12_PERFCTR_PF_TLB_REFILL			0xe7
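
/* ARMv7 Krait specific event types */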
#define KRAIT_PMRESR0_GROUP0				0xcc
#define KRAIT_PMRESR1_GROUP0				0xd0
#define KRAIT_PMRESR2_GROUP0				0xd4
#define KRAIT_VPMRESR0_GROUP0				0xd8

#define KRAIT_PERFCTR_L1_ICACHE_ACCESS			0x10011
#define KRAIT_PERFCTR_L1_ICACHE_MISS			0x10010

#define KRAIT_PERFCTR_L1_ITLB_ACCESS			0x12222
#define KRAIT_PERFCTR_L1_DTLB_ACCESS			0x12210
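
/* ARMv7 Scorpion specific event types */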
#define SCORPION_LPM0_GROUP0				0x4c
#define SCORPION_LPM1_GROUP0				0x50
#define SCORPION_LPM2_GROUP0				0x54
#define SCORPION_L2LPM_GROUP0				0x58
#define SCORPION_VLPM_GROUP0				0x5c

#define SCORPION_ICACHE_ACCESS				0x10053
#define SCORPION_ICACHE_MISS				0x10052

#define SCORPION_DTLB_ACCESS				0x12013
#define SCORPION_DTLB_MISS				0x12012

#define SCORPION_ITLB_MISS				0x12021
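
/*
 * Cortex-A8 HW events mapping
 *
 * The hardware events that we support. We do support cache operations but
 * we have harvard caches and no way to combine instruction and data
 * accesses/misses in hardware.
 */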
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A8_PERFCTR_STALL_ISIDE,
};

static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best we
	 * can do. Writes and reads get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
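
/*
 * Cortex-A9 HW events mapping
 */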
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A9_PERFCTR_STALL_ICACHE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= ARMV7_A9_PERFCTR_STALL_DISPATCH,
};

static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best we
	 * can do. Writes and reads get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
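
/*
 * Cortex-A5 HW events mapping
 */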
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};

static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,
	/*
	 * The prefetch counters don't differentiate between the I side and the
	 * D side.
	 */
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
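
/*
 * Cortex-A15 HW events mapping
 */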
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};

static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,

	/*
	 * Not all performance counters differentiate between read and write
	 * accesses/misses so we're not always strictly correct, but it's the
	 * best we can do. Writes and reads get combined in these cases.
	 */
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
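
/*
 * Cortex-A7 HW events mapping
 */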
static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};

static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best we
	 * can do. Writes and reads get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
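
/*
 * Cortex-A12 HW events mapping
 */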
static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};

static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	/*
	 * Not all performance counters differentiate between read and write
	 * accesses/misses so we're not always strictly correct, but it's the
	 * best we can do. Writes and reads get combined in these cases.
	 */
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A12_PERFCTR_PF_TLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
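
/*
 * Krait HW events mapping
 */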
static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best we
	 * can do. Writes and reads get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= KRAIT_PERFCTR_L1_ICACHE_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_DTLB_ACCESS,

	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ITLB_ACCESS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ITLB_ACCESS,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
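
/*
 * Scorpion HW events mapping
 */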
static const unsigned scorpion_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};

static const unsigned scorpion_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,
	/*
	 * The performance counters don't differentiate between read and write
	 * accesses/misses so this isn't strictly correct, but it's the best we
	 * can do. Writes and reads get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= SCORPION_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= SCORPION_ICACHE_MISS,
	/*
	 * Only ITLB misses and DTLB refills are supported.  If users want the
	 * DTLB refills misses a raw counter must be used.
	 */
	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)]	= SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= SCORPION_DTLB_MISS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)]	= SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= SCORPION_DTLB_MISS,
	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= SCORPION_ITLB_MISS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= SCORPION_ITLB_MISS,
	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};

PMU_FORMAT_ATTR(event, "config:0-7");

static struct attribute *armv7_pmu_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

static struct attribute_group armv7_pmu_format_attr_group = {
	.name = "format",
	.attrs = armv7_pmu_format_attrs,
};

#define ARMV7_EVENT_ATTR_RESOLVE(m) #m
#define ARMV7_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR_STRING(name, armv7_event_attr_##name, \
			      "event=" ARMV7_EVENT_ATTR_RESOLVE(config))

ARMV7_EVENT_ATTR(sw_incr, ARMV7_PERFCTR_PMNC_SW_INCR);
ARMV7_EVENT_ATTR(l1i_cache_refill, ARMV7_PERFCTR_L1_ICACHE_REFILL);
ARMV7_EVENT_ATTR(l1i_tlb_refill, ARMV7_PERFCTR_ITLB_REFILL);
ARMV7_EVENT_ATTR(l1d_cache_refill, ARMV7_PERFCTR_L1_DCACHE_REFILL);
ARMV7_EVENT_ATTR(l1d_cache, ARMV7_PERFCTR_L1_DCACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_tlb_refill, ARMV7_PERFCTR_DTLB_REFILL);
ARMV7_EVENT_ATTR(ld_retired, ARMV7_PERFCTR_MEM_READ);
ARMV7_EVENT_ATTR(st_retired, ARMV7_PERFCTR_MEM_WRITE);
ARMV7_EVENT_ATTR(inst_retired, ARMV7_PERFCTR_INSTR_EXECUTED);
ARMV7_EVENT_ATTR(exc_taken, ARMV7_PERFCTR_EXC_TAKEN);
ARMV7_EVENT_ATTR(exc_return, ARMV7_PERFCTR_EXC_EXECUTED);
ARMV7_EVENT_ATTR(cid_write_retired, ARMV7_PERFCTR_CID_WRITE);
ARMV7_EVENT_ATTR(pc_write_retired, ARMV7_PERFCTR_PC_WRITE);
ARMV7_EVENT_ATTR(br_immed_retired, ARMV7_PERFCTR_PC_IMM_BRANCH);
ARMV7_EVENT_ATTR(br_return_retired, ARMV7_PERFCTR_PC_PROC_RETURN);
ARMV7_EVENT_ATTR(unaligned_ldst_retired, ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV7_EVENT_ATTR(br_mis_pred, ARMV7_PERFCTR_PC_BRANCH_MIS_PRED);
ARMV7_EVENT_ATTR(cpu_cycles, ARMV7_PERFCTR_CLOCK_CYCLES);
ARMV7_EVENT_ATTR(br_pred, ARMV7_PERFCTR_PC_BRANCH_PRED);

static struct attribute *armv7_pmuv1_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv1_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv1_event_attrs,
};

static const struct attribute_group *armv7_pmuv1_attr_groups[] = {
	&armv7_pmuv1_events_attr_group,
	&armv7_pmu_format_attr_group,
	NULL,
};

ARMV7_EVENT_ATTR(mem_access, ARMV7_PERFCTR_MEM_ACCESS);
ARMV7_EVENT_ATTR(l1i_cache, ARMV7_PERFCTR_L1_ICACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_cache_wb, ARMV7_PERFCTR_L1_DCACHE_WB);
ARMV7_EVENT_ATTR(l2d_cache, ARMV7_PERFCTR_L2_CACHE_ACCESS);
ARMV7_EVENT_ATTR(l2d_cache_refill, ARMV7_PERFCTR_L2_CACHE_REFILL);
ARMV7_EVENT_ATTR(l2d_cache_wb, ARMV7_PERFCTR_L2_CACHE_WB);
ARMV7_EVENT_ATTR(bus_access, ARMV7_PERFCTR_BUS_ACCESS);
ARMV7_EVENT_ATTR(memory_error, ARMV7_PERFCTR_MEM_ERROR);
ARMV7_EVENT_ATTR(inst_spec, ARMV7_PERFCTR_INSTR_SPEC);
ARMV7_EVENT_ATTR(ttbr_write_retired, ARMV7_PERFCTR_TTBR_WRITE);
ARMV7_EVENT_ATTR(bus_cycles, ARMV7_PERFCTR_BUS_CYCLES);

static struct attribute *armv7_pmuv2_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	&armv7_event_attr_mem_access.attr.attr,
	&armv7_event_attr_l1i_cache.attr.attr,
	&armv7_event_attr_l1d_cache_wb.attr.attr,
	&armv7_event_attr_l2d_cache.attr.attr,
	&armv7_event_attr_l2d_cache_refill.attr.attr,
	&armv7_event_attr_l2d_cache_wb.attr.attr,
	&armv7_event_attr_bus_access.attr.attr,
	&armv7_event_attr_memory_error.attr.attr,
	&armv7_event_attr_inst_spec.attr.attr,
	&armv7_event_attr_ttbr_write_retired.attr.attr,
	&armv7_event_attr_bus_cycles.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv2_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv2_event_attrs,
};

static const struct attribute_group *armv7_pmuv2_attr_groups[] = {
	&armv7_pmuv2_events_attr_group,
	&armv7_pmu_format_attr_group,
	NULL,
};
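
/*
 * Perf Events' indices
 */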
#define	ARMV7_IDX_CYCLE_COUNTER	0
#define	ARMV7_IDX_COUNTER0	1
#define	ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define	ARMV7_MAX_COUNTERS	32
#define	ARMV7_COUNTER_MASK	(ARMV7_MAX_COUNTERS - 1)
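
/*
 * ARMv7 low level PMNC access
 */

/*
 * Perf Event to low level counters mapping
 */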
#define	ARMV7_IDX_TO_COUNTER(x)	\
	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)

/*
 * Per-CPU PMNC: config reg
 */
#define ARMV7_PMNC_E		(1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P		(1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C		(1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D		(1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X		(1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP		(1 << 5) /* Disable CCNT if non-invasive debug */
#define	ARMV7_PMNC_N_SHIFT	11	 /* Number of counters supported */
#define	ARMV7_PMNC_N_MASK	0x1f
#define	ARMV7_PMNC_MASK		0x3f	 /* Mask for writable bits */

/*
 * FLAG: counters overflow flag status reg
 */
#define	ARMV7_FLAG_MASK		0xffffffff	/* Mask for writable bits */
#define	ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK

/*
 * PMXEVTYPER: Event selection reg
 */
#define	ARMV7_EVTYPE_MASK	0xc80000ff	/* Mask for writable bits */
#define	ARMV7_EVTYPE_EVENT	0xff		/* Mask for EVENT bits */

/*
 * Event filters for PMUv2
 */
#define	ARMV7_EXCLUDE_PL1	(1 << 31)
#define	ARMV7_EXCLUDE_USER	(1 << 30)
#define	ARMV7_INCLUDE_HYP	(1 << 27)

static inline u32 armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}

static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}

static inline int armv7_pmnc_has_overflowed(u32 pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}

static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
	return idx >= ARMV7_IDX_CYCLE_COUNTER &&
		idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
}

static inline void armv7_pmnc_select_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();
}

static inline u32 armv7pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	} else {
		armv7_pmnc_select_counter(idx);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
	}

	return value;
}

static inline void armv7pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	} else {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
	}
}

static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	armv7_pmnc_select_counter(idx);
	val &= ARMV7_EVTYPE_MASK;
	asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
}

static inline void armv7_pmnc_enable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
}

static inline void armv7_pmnc_disable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
}

static inline void armv7_pmnc_enable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
}

static inline void armv7_pmnc_disable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
	isb();
}

static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}

#ifdef DEBUG
static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
{
	u32 val;
	unsigned int cnt;

	pr_info("PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	pr_info("PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	pr_info("CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	pr_info("INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	pr_info("FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	pr_info("SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	pr_info("CCNT  =0x%08x\n", val);

	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		pr_info("CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		pr_info("CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
#endif

static void armv7pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We only need to set the event for the cycle counter if we
	 * have the ability to perform event filtering.
	 */
	if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv7pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}

static void armv7pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV7_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
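
/*
 * Add an event filter to a given event. This will only work for PMUv2 PMUs.
 */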
static int armv7pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;
	if (attr->exclude_user)
		config_base |= ARMV7_EXCLUDE_USER;
	if (attr->exclude_kernel)
		config_base |= ARMV7_EXCLUDE_PL1;
	if (!attr->exclude_hv)
		config_base |= ARMV7_INCLUDE_HYP;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}

static void armv7pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_disable_counter(idx);
		armv7_pmnc_disable_intens(idx);
	}

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}

static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}

static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}

static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}

static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}

static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}

static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}

static int krait_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map,
				&krait_perf_cache_map, 0xFFFFF);
}

static int krait_map_event_no_branch(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map_no_branch,
				&krait_perf_cache_map, 0xFFFFF);
}

static int scorpion_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &scorpion_perf_map,
				&scorpion_perf_cache_map, 0xFFFFF);
}

static void armv7pmu_init(struct arm_pmu *cpu_pmu)
{
	cpu_pmu->handle_irq	= armv7pmu_handle_irq;
	cpu_pmu->enable		= armv7pmu_enable_event;
	cpu_pmu->disable	= armv7pmu_disable_event;
	cpu_pmu->read_counter	= armv7pmu_read_counter;
	cpu_pmu->write_counter	= armv7pmu_write_counter;
	cpu_pmu->get_event_idx	= armv7pmu_get_event_idx;
	cpu_pmu->start		= armv7pmu_start;
	cpu_pmu->stop		= armv7pmu_stop;
	cpu_pmu->reset		= armv7pmu_reset;
	cpu_pmu->max_period	= (1LLU << 32) - 1;
}

static void armv7_read_num_pmnc_events(void *info)
{
	int *nb_cnt = info;

	/* Read the nb of CNTx counters supported from PMNC */
	*nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter */
	*nb_cnt += 1;
}

static int armv7_probe_num_events(struct arm_pmu *arm_pmu)
{
	return smp_call_function_any(&arm_pmu->supported_cpus,
				     armv7_read_num_pmnc_events,
				     &arm_pmu->num_events, 1);
}

static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a8";
	cpu_pmu->map_event	= armv7_a8_map_event;
	cpu_pmu->pmu.attr_groups = armv7_pmuv1_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a9";
	cpu_pmu->map_event	= armv7_a9_map_event;
	cpu_pmu->pmu.attr_groups = armv7_pmuv1_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a5";
	cpu_pmu->map_event	= armv7_a5_map_event;
	cpu_pmu->pmu.attr_groups = armv7_pmuv1_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a15";
	cpu_pmu->map_event	= armv7_a15_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->pmu.attr_groups = armv7_pmuv2_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a7";
	cpu_pmu->map_event	= armv7_a7_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->pmu.attr_groups = armv7_pmuv2_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a12";
	cpu_pmu->map_event	= armv7_a12_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->pmu.attr_groups = armv7_pmuv2_attr_groups;
	return armv7_probe_num_events(cpu_pmu);
}

static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
{
	int ret = armv7_a12_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a17";
	cpu_pmu->pmu.attr_groups = armv7_pmuv2_attr_groups;
	return ret;
}
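
/*
 * Krait Performance Monitor Region Event Selection Register (PMRESRn)
 *
 *            31   30     24     16     8      0
 *            +--------------------------------+
 *  PMRESR0   | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 0
 *            +--------------------------------+
 *  PMRESR1   | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 1
 *            +--------------------------------+
 *  PMRESR2   | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 2
 *            +--------------------------------+
 *  VPMRESR0  | EN |  CC  |  CC  |  CC  |  CC  |   N = 2, R = ?
 *            +--------------------------------+
 *              EN | G=3  | G=2  | G=1  | G=0
 *
 *  Event Encoding:
 *
 *      hwc->config_base = 0xNRCCG
 *
 *      N  = prefix, 1 for Krait CPU (PMRESRn), 2 for Venum VFP (VPMRESR)
 *      R  = region register
 *      CC = class of events the group G is choosing from
 *      G  = group or particular event
 *
 *  Example: 0x12021 is a Krait CPU event in PMRESR2's group 1 with code 2
 */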
#define KRAIT_EVENT		(1 << 16)
#define VENUM_EVENT		(2 << 16)
#define KRAIT_EVENT_MASK	(KRAIT_EVENT | VENUM_EVENT)
#define PMRESRn_EN		BIT(31)

#define EVENT_REGION(event)	(((event) >> 12) & 0xf)		/* R */
#define EVENT_GROUP(event)	((event) & 0xf)			/* G */
#define EVENT_CODE(event)	(((event) >> 4) & 0xff)		/* CC */
#define EVENT_VENUM(event)	(!!(event & VENUM_EVENT))	/* N=2 */
#define EVENT_CPU(event)	(!!(event & KRAIT_EVENT))	/* N=1 */

static u32 krait_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}

	return val;
}

static void krait_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}
}

static u32 venum_read_pmresr(void)
{
	u32 val;
	asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
	return val;
}

static void venum_write_pmresr(u32 val)
{
	asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
}

static void venum_pre_pmresr(u32 *venum_orig_val, u32 *fp_orig_val)
{
	u32 venum_new_val;
	u32 fp_new_val;

	BUG_ON(preemptible());
	/* CPACR Enable CP10 and CP11 access */
	*venum_orig_val = get_copro_access();
	venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
	set_copro_access(venum_new_val);

	/* Enable FPEXC */
	*fp_orig_val = fmrx(FPEXC);
	fp_new_val = *fp_orig_val | FPEXC_EN;
	fmxr(FPEXC, fp_new_val);
}

static void venum_post_pmresr(u32 venum_orig_val, u32 fp_orig_val)
{
	BUG_ON(preemptible());
	/* Restore FPEXC */
	fmxr(FPEXC, fp_orig_val);
	isb();
	/* Restore CPACR */
	set_copro_access(venum_orig_val);
}

static u32 krait_get_pmresrn_event(unsigned int region)
{
	static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
					     KRAIT_PMRESR1_GROUP0,
					     KRAIT_PMRESR2_GROUP0 };
	return pmresrn_table[region];
}

static void krait_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = KRAIT_VPMRESR0_GROUP0;
	else
		val = krait_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		krait_write_pmresrn(region, val);
	}
}

static u32 clear_pmresrn_group(u32 val, int group)
{
	u32 mask;
	int group_shift;

	group_shift = group * 8;
	mask = 0xff << group_shift;
	val &= ~mask;

	/* Don't clear enable bit if entire region isn't disabled */
	if (val & ~PMRESRn_EN)
		return val |= PMRESRn_EN;

	return 0;
}

static void krait_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		krait_write_pmresrn(region, val);
	}
}

static void krait_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void krait_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We set the event for the cycle counter because we
	 * have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_evt_setup(idx, hwc->config_base);
	else
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void krait_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	krait_write_pmresrn(0, 0);
	krait_write_pmresrn(1, 0);
	krait_write_pmresrn(2, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}

static int krait_event_to_bit(struct perf_event *event, unsigned int region,
			      unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = KRAIT_VPMRESR0_GROUP0;
	else
		bit = krait_get_pmresrn_event(region);
	bit -= krait_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
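
/*
 * We check for column exclusion constraints here.
 * Two events can't use the same group within a pmresr register.
 */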
static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				   struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int code = EVENT_CODE(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 2)
			return -EINVAL;
		if (venum_event && (code & 0xe0))
			return -EINVAL;

		bit = krait_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}

static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		bit = krait_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}

static int krait_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_krait";
	/* Some early versions of Krait don't support PC write events */
	if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
				  "qcom,no-pc-write"))
		cpu_pmu->map_event = krait_map_event_no_branch;
	else
		cpu_pmu->map_event = krait_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->reset		= krait_pmu_reset;
	cpu_pmu->enable		= krait_pmu_enable_event;
	cpu_pmu->disable	= krait_pmu_disable_event;
	cpu_pmu->get_event_idx	= krait_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
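
/*
 * Scorpion Local Performance Monitor Register (LPMn)
 *
 *            31   30     24     16     8      0
 *            +--------------------------------+
 *  LPM0      | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 0
 *            +--------------------------------+
 *  LPM1      | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 1
 *            +--------------------------------+
 *  LPM2      | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 2
 *            +--------------------------------+
 *  L2LPM     | EN |  CC  |  CC  |  CC  |  CC  |   N = 1, R = 3
 *            +--------------------------------+
 *  VLPM      | EN |  CC  |  CC  |  CC  |  CC  |   N = 2, R = ?
 *            +--------------------------------+
 *              EN | G=3  | G=2  | G=1  | G=0
 *
 *  Event Encoding:
 *
 *      hwc->config_base = 0xNRCCG
 *
 *      N  = prefix, 1 for Scorpion CPU (LPMn/L2LPM), 2 for Venum VFP (VLPM)
 *      R  = region register
 *      CC = class of events the group G is choosing from
 *      G  = group or particular event
 *
 *  Example: 0x12021 is a Scorpion CPU event in LPM2's group 1 with code 2
 */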
static u32 scorpion_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 0, %0, c15, c0, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 2, %0, c15, c0, 0" : "=r" (val));
		break;
	case 3:
		asm volatile("mrc p15, 3, %0, c15, c2, 0" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}

	return val;
}

static void scorpion_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 0, %0, c15, c0, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c15, c0, 0" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 2, %0, c15, c0, 0" : : "r" (val));
		break;
	case 3:
		asm volatile("mcr p15, 3, %0, c15, c2, 0" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}
}

static u32 scorpion_get_pmresrn_event(unsigned int region)
{
	static const u32 pmresrn_table[] = { SCORPION_LPM0_GROUP0,
					     SCORPION_LPM1_GROUP0,
					     SCORPION_LPM2_GROUP0,
					     SCORPION_L2LPM_GROUP0 };
	return pmresrn_table[region];
}

static void scorpion_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	unsigned int code = EVENT_CODE(config_base);
	unsigned int group_shift;
	bool venum_event = EVENT_VENUM(config_base);

	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = SCORPION_VLPM_GROUP0;
	else
		val = scorpion_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		scorpion_write_pmresrn(region, val);
	}
}

static void scorpion_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		scorpion_write_pmresrn(region, val);
	}
}

static void scorpion_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void scorpion_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't set the event for the cycle counter because we
	 * don't have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_evt_setup(idx, hwc->config_base);
	else if (idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void scorpion_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	scorpion_write_pmresrn(0, 0);
	scorpion_write_pmresrn(1, 0);
	scorpion_write_pmresrn(2, 0);
	scorpion_write_pmresrn(3, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}

static int scorpion_event_to_bit(struct perf_event *event, unsigned int region,
			      unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = SCORPION_VLPM_GROUP0;
	else
		bit = scorpion_get_pmresrn_event(region);
	bit -= scorpion_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
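
/*
 * We check for column exclusion constraints here.
 * Two events can't use the same group within a pmresr register.
 */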
static int scorpion_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 3)
			return -EINVAL;

		bit = scorpion_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}

static void scorpion_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
					 struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		bit = scorpion_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}

static int scorpion_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}

static int scorpion_mp_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion_mp";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}

static const struct of_device_id armv7_pmu_of_device_ids[] = {
	{.compatible = "arm,cortex-a17-pmu",	.data = armv7_a17_pmu_init},
	{.compatible = "arm,cortex-a15-pmu",	.data = armv7_a15_pmu_init},
	{.compatible = "arm,cortex-a12-pmu",	.data = armv7_a12_pmu_init},
	{.compatible = "arm,cortex-a9-pmu",	.data = armv7_a9_pmu_init},
	{.compatible = "arm,cortex-a8-pmu",	.data = armv7_a8_pmu_init},
	{.compatible = "arm,cortex-a7-pmu",	.data = armv7_a7_pmu_init},
	{.compatible = "arm,cortex-a5-pmu",	.data = armv7_a5_pmu_init},
	{.compatible = "qcom,krait-pmu",	.data = krait_pmu_init},
	{.compatible = "qcom,scorpion-pmu",	.data = scorpion_pmu_init},
	{.compatible = "qcom,scorpion-mp-pmu",	.data = scorpion_mp_pmu_init},
	{},
};

static const struct pmu_probe_info armv7_pmu_probe_table[] = {
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A8, armv7_a8_pmu_init),
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A9, armv7_a9_pmu_init),
	{ /* sentinel value */ }
};

static int armv7_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv7_pmu_of_device_ids,
				    armv7_pmu_probe_table);
}

static struct platform_driver armv7_pmu_driver = {
	.driver		= {
		.name	= "armv7-pmu",
		.of_match_table = armv7_pmu_of_device_ids,
	},
	.probe		= armv7_pmu_device_probe,
};

static int __init register_armv7_pmu_driver(void)
{
	return platform_driver_register(&armv7_pmu_driver);
}
device_initcall(register_armv7_pmu_driver);
#endif	/* CONFIG_CPU_V7 */