1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19#ifdef CONFIG_CPU_V7
20
21#include <asm/cp15.h>
22#include <asm/vfp.h>
23#include "../vfp/vfpinstr.h"
24
25
26
27
28
29
30
31
/*
 * Common ARMv7 PMU event numbers.
 *
 * 0x00-0x1D are the architecturally defined events (see the ARM ARM,
 * Performance Monitors chapter); per-core enums below layer
 * implementation-specific events on top of these.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR			= 0x00,
	ARMV7_PERFCTR_L1_ICACHE_REFILL			= 0x01,
	ARMV7_PERFCTR_ITLB_REFILL			= 0x02,
	ARMV7_PERFCTR_L1_DCACHE_REFILL			= 0x03,
	ARMV7_PERFCTR_L1_DCACHE_ACCESS			= 0x04,
	ARMV7_PERFCTR_DTLB_REFILL			= 0x05,
	ARMV7_PERFCTR_MEM_READ				= 0x06,
	ARMV7_PERFCTR_MEM_WRITE				= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED			= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN				= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED			= 0x0A,
	ARMV7_PERFCTR_CID_WRITE				= 0x0B,

	/*
	 * ARMV7_PERFCTR_PC_WRITE is used as HW_BRANCH_INSTRUCTIONS in the
	 * event maps below: it counts taken branches and other explicit
	 * writes to the PC (per the ARM ARM event definition).
	 */
	ARMV7_PERFCTR_PC_WRITE				= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH			= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN			= 0x0E,
	ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS		= 0x0F,
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED		= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES			= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED			= 0x12,

	/* 0x13-0x1D: events added by later architecture revisions
	 * (PMUv2-era) — not implemented by every v7 core. */
	ARMV7_PERFCTR_MEM_ACCESS			= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS			= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB			= 0x15,
	ARMV7_PERFCTR_L2_CACHE_ACCESS			= 0x16,
	ARMV7_PERFCTR_L2_CACHE_REFILL			= 0x17,
	ARMV7_PERFCTR_L2_CACHE_WB			= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS			= 0x19,
	ARMV7_PERFCTR_MEM_ERROR				= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC			= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE			= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES			= 0x1D,

	/* Pseudo event number: requests the dedicated cycle counter
	 * (see armv7pmu_get_event_idx()). */
	ARMV7_PERFCTR_CPU_CYCLES			= 0xFF
};
76
77
/* Cortex-A8 implementation-specific event numbers. */
enum armv7_a8_perf_types {
	ARMV7_A8_PERFCTR_L2_CACHE_ACCESS		= 0x43,
	ARMV7_A8_PERFCTR_L2_CACHE_REFILL		= 0x44,
	ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS		= 0x50,
	ARMV7_A8_PERFCTR_STALL_ISIDE			= 0x56,
};
84
85
/* Cortex-A9 implementation-specific event numbers. */
enum armv7_a9_perf_types {
	ARMV7_A9_PERFCTR_INSTR_CORE_RENAME		= 0x68,
	ARMV7_A9_PERFCTR_STALL_ICACHE			= 0x60,
	ARMV7_A9_PERFCTR_STALL_DISPATCH			= 0x66,
};
91
92
/* Cortex-A5 implementation-specific event numbers. */
enum armv7_a5_perf_types {
	ARMV7_A5_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP		= 0xc3,
};
97
98
/*
 * Cortex-A15 implementation-specific event numbers. Unlike the common
 * architectural events, the A15 provides separate read/write variants
 * for L1/L2 cache and DTLB events (used in the cache map below).
 */
enum armv7_a15_perf_types {
	ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ		= 0x40,
	ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE	= 0x41,
	ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ		= 0x42,
	ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE	= 0x43,

	ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ		= 0x4C,
	ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE		= 0x4D,

	ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ		= 0x50,
	ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE		= 0x51,
	ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ		= 0x52,
	ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE		= 0x53,

	ARMV7_A15_PERFCTR_PC_WRITE_SPEC			= 0x76,
};
115
116
/*
 * Cortex-A12 implementation-specific event numbers: separate read/write
 * variants for L1/L2 cache accesses, plus a prefetcher TLB refill event.
 */
enum armv7_a12_perf_types {
	ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ		= 0x40,
	ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE	= 0x41,

	ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ		= 0x50,
	ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE		= 0x51,

	ARMV7_A12_PERFCTR_PC_WRITE_SPEC			= 0x76,

	ARMV7_A12_PERFCTR_PF_TLB_REFILL			= 0xe7,
};
128
129
/*
 * Qualcomm Krait event numbers.
 *
 * The first group are region-event selection register identifiers
 * (PMRESR0/1/2 and the VFP/Venum VPMRESR0 — see krait_read_pmresrn()).
 * The remaining values are extended event codes wider than 8 bits
 * (masked with 0xFFFFF in krait_map_event()); the high bits presumably
 * encode the PMRESR group/region — see the KRAIT_EVENT/VENUM_EVENT
 * encoding macros below. TODO: confirm exact field layout against
 * Qualcomm documentation.
 */
enum krait_perf_types {
	KRAIT_PMRESR0_GROUP0				= 0xcc,
	KRAIT_PMRESR1_GROUP0				= 0xd0,
	KRAIT_PMRESR2_GROUP0				= 0xd4,
	KRAIT_VPMRESR0_GROUP0				= 0xd8,

	KRAIT_PERFCTR_L1_ICACHE_ACCESS			= 0x10011,
	KRAIT_PERFCTR_L1_ICACHE_MISS			= 0x10010,

	KRAIT_PERFCTR_L1_ITLB_ACCESS			= 0x12222,
	KRAIT_PERFCTR_L1_DTLB_ACCESS			= 0x12210,
};
142
143
144
145
146
147
148
149
/*
 * Cortex-A8 mapping of generic perf hardware events to ARMv7/A8 event
 * numbers. Events not listed are marked unsupported.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A8_PERFCTR_STALL_ISIDE,
};
160
/* Cortex-A8 mapping of generic perf cache events to hardware events. */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The A8 counters used here don't differentiate between read and
	 * write accesses/misses (the same event backs both OP_READ and
	 * OP_WRITE), so reads and writes get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A8_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
195
196
197
198
/*
 * Cortex-A9 mapping of generic perf hardware events. Note that
 * "instructions" uses the A9 core-rename event rather than the
 * architectural INSTR_EXECUTED event.
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND]	= ARMV7_A9_PERFCTR_STALL_ICACHE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND]	= ARMV7_A9_PERFCTR_STALL_DISPATCH,
};
210
/* Cortex-A9 mapping of generic perf cache events to hardware events. */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The counters used here don't differentiate between read and write
	 * accesses/misses (the same event backs both OP_READ and OP_WRITE),
	 * so reads and writes get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	/* No L1I access event on this table — only refills are counted. */
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
239
240
241
242
/* Cortex-A5 mapping of generic perf hardware events. */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
252
/* Cortex-A5 mapping of generic perf cache events to hardware events. */
static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	/* Prefetch is approximated by the A5 prefetcher linefill events. */
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	/*
	 * The same prefetcher linefill events are reused for the L1I
	 * prefetch slots — the counters don't distinguish I-side from
	 * D-side prefetch here.
	 */
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
285
286
287
288
/* Cortex-A15 mapping of generic perf hardware events. */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	/* Branches use the speculative PC-write event on the A15. */
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};
299
/*
 * Cortex-A15 mapping of generic perf cache events. Unlike most v7 cores
 * the A15 provides distinct read/write events for the L1D, L2 and DTLB.
 */
static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,

	/* L1I uses the common architectural events (no read/write split). */
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
334
335
336
337
/* Cortex-A7 mapping of generic perf hardware events. */
static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};
348
/* Cortex-A7 mapping of generic perf cache events to hardware events. */
static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The counters used here don't differentiate between read and write
	 * accesses/misses (the same event backs both OP_READ and OP_WRITE),
	 * so reads and writes get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
383
384
385
386
/* Cortex-A12 mapping of generic perf hardware events. */
static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES]		= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	/* Branches use the speculative PC-write event on the A12. */
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_BUS_CYCLES,
};
397
/*
 * Cortex-A12 mapping of generic perf cache events. L1D/L2 accesses have
 * separate read/write events; refills use the common combined events.
 */
static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
	/* Prefetcher-induced TLB refills have a dedicated A12 event. */
	[C(DTLB)][C(OP_PREFETCH)][C(RESULT_MISS)]	= ARMV7_A12_PERFCTR_PF_TLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
433
434
435
436
/* Krait mapping of generic perf hardware events. */
static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS]	= ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};
445
/*
 * Krait event map variant without the branch-instructions event —
 * identical to krait_perf_map except HW_BRANCH_INSTRUCTIONS is left
 * unsupported (selected via krait_map_event_no_branch()).
 */
static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES]		= ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS]		= ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_MISSES]		= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES]		= ARMV7_PERFCTR_CLOCK_CYCLES,
};
453
/* Krait mapping of generic perf cache events to hardware events. */
static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The counters used here don't differentiate between read and write
	 * accesses/misses (the same event backs both OP_READ and OP_WRITE),
	 * so reads and writes get combined.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_DCACHE_REFILL,

	/* L1I and TLB events use the extended Krait event encodings. */
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)]	= KRAIT_PERFCTR_L1_ICACHE_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_DTLB_ACCESS,

	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ITLB_ACCESS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_ACCESS)]	= KRAIT_PERFCTR_L1_ITLB_ACCESS,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)]	= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
483
484
485
486
/*
 * Perf-event counter index space: index 0 is the dedicated cycle
 * counter, the programmable event counters follow from index 1.
 */
#define ARMV7_IDX_CYCLE_COUNTER	0
#define ARMV7_IDX_COUNTER0	1
#define ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV7_MAX_COUNTERS	32
#define ARMV7_COUNTER_MASK	(ARMV7_MAX_COUNTERS - 1)

/*
 * Convert a driver counter index into the hardware counter number:
 * the cycle counter is not part of the hardware's programmable-counter
 * space, so hardware counters are numbered from 0 (i.e. idx - 1),
 * wrapped into the 0..31 range.
 */
#define	ARMV7_IDX_TO_COUNTER(x)	\
	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
504
505
506
507
/*
 * PMNC (PMCR) control register bits — see the ARM ARM, Performance
 * Monitors chapter, for the authoritative definitions.
 */
#define ARMV7_PMNC_E		(1 << 0)	/* Enable all counters */
#define ARMV7_PMNC_P		(1 << 1)	/* Reset all event counters */
#define ARMV7_PMNC_C		(1 << 2)	/* Reset the cycle counter */
#define ARMV7_PMNC_D		(1 << 3)	/* Cycle counter divider (/64) */
#define ARMV7_PMNC_X		(1 << 4)	/* Export of events enable */
#define ARMV7_PMNC_DP		(1 << 5)	/* Disable cycle counter in
						 * prohibited regions */
#define	ARMV7_PMNC_N_SHIFT	11		/* Counter-count field shift */
#define	ARMV7_PMNC_N_MASK	0x1f		/* Counter-count field mask */
#define	ARMV7_PMNC_MASK		0x3f		/* Writable bits of PMNC */

/*
 * PMOVSR: overflow flag status register masks.
 */
#define	ARMV7_FLAG_MASK		0xffffffff	/* Writable (W1C) flag bits */
#define	ARMV7_OVERFLOWED_MASK	ARMV7_FLAG_MASK

/*
 * PMXEVTYPER: event selection register masks.
 */
#define	ARMV7_EVTYPE_MASK	0xc80000ff	/* Writable bits */
#define	ARMV7_EVTYPE_EVENT	0xff		/* Event number field */
529
530
531
532
/*
 * Privilege-level event filter bits for PMXEVTYPER (installed via
 * armv7pmu_set_event_filter()). Written as unsigned constants: left-
 * shifting 1 (a signed int) by 31 shifts into the sign bit, which is
 * undefined behaviour in ISO C (CERT INT34-C).
 */
#define	ARMV7_EXCLUDE_PL1	(1U << 31)	/* don't count PL1 (kernel) */
#define	ARMV7_EXCLUDE_USER	(1U << 30)	/* don't count PL0 (user) */
#define	ARMV7_INCLUDE_HYP	(1U << 27)	/* also count hyp mode */
536
/* Read PMNC (the PMCR performance monitor control register). */
static inline u32 armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}
543
/*
 * Write PMNC (PMCR), keeping only the architecturally writable bits.
 * The isb() serialises the instruction stream before the control
 * register update takes effect.
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
550
/* Nonzero if any counter overflow flag is set in a saved PMOVSR value. */
static inline int armv7_pmnc_has_overflowed(u32 pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}
555
556static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
557{
558 return idx >= ARMV7_IDX_CYCLE_COUNTER &&
559 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
560}
561
/* Test the overflow flag of one counter in a saved PMOVSR value. */
static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
}
566
/*
 * Select counter idx in PMSELR so that subsequent PMXEVTYPER/PMXEVCNTR
 * accesses target it. The isb() guarantees the selection is visible
 * before the following counter access. Returns idx (callers compare
 * the result to detect misuse).
 */
static inline int armv7_pmnc_select_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();

	return idx;
}
575
/*
 * Read the current value of the hardware counter backing @event:
 * CCNT for the cycle counter, otherwise PMXEVCNTR after selecting the
 * counter. Returns 0 (and logs an error) for an invalid index.
 */
static inline u32 armv7pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));

	return value;
}
593
/*
 * Write the hardware counter backing @event: CCNT for the cycle
 * counter, otherwise PMXEVCNTR after selecting the counter. Logs an
 * error and does nothing for an invalid index.
 */
static inline void armv7pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
}
608
/*
 * Program the event type (PMXEVTYPER) for counter idx, keeping only
 * the architecturally writable bits.
 */
static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTYPE_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}
616
/* Enable counter idx via the PMCNTENSET register. */
static inline int armv7_pmnc_enable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
	return idx;
}
623
/* Disable counter idx via the PMCNTENCLR register. */
static inline int armv7_pmnc_disable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
	return idx;
}
630
/* Enable the overflow interrupt for counter idx (PMINTENSET). */
static inline int armv7_pmnc_enable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
	return idx;
}
637
/*
 * Disable the overflow interrupt for counter idx (PMINTENCLR), then
 * clear any pending overflow flag for it (PMOVSR) so a stale overflow
 * cannot fire once the counter is reused. The isb()s order the two
 * register writes.
 */
static inline int armv7_pmnc_disable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
	isb();

	return idx;
}
649
/*
 * Read the overflow flag status register (PMOVSR) and write the value
 * back to clear the flags that were set (write-one-to-clear). Returns
 * the flags as read.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
663
#ifdef DEBUG
/*
 * Debug helper: dump the PMU control, enable, interrupt, flag and
 * counter registers of the current CPU to the kernel log.
 */
static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	/* Dump the value and event selection of each programmable counter. */
	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
#endif
702
/*
 * Enable the hardware counter backing @event: program its event type,
 * unmask its overflow interrupt and switch the counter on, all under
 * the PMU lock so the sequence is not interleaved with other updates.
 */
static void armv7pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = cpu_pmu->get_hw_events();
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter while (re)programming it.
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 * We only need to set the event for the cycle counter if we
	 * have the ability to perform event filtering.
	 */
	if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter.
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter.
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
748
/*
 * Disable the hardware counter backing @event and mask its overflow
 * interrupt, under the PMU lock.
 */
static void armv7pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = cpu_pmu->get_hw_events();
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Disable counter and interrupt.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter.
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter (also clears a pending
	 * overflow flag — see armv7_pmnc_disable_intens()).
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
780
/*
 * PMU overflow interrupt handler: reads and clears the overflow flags,
 * then updates and re-periods every event whose counter overflowed.
 * All counters share this single interrupt line.
 */
static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = cpu_pmu->get_hw_events();
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags (must happen before checking them,
	 * so a new overflow during handling raises another interrupt).
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		/* Throttling may ask us to stop the event. */
		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events queued by perf_event_overflow()
	 * (we are in hard IRQ context, so they are run via irq_work).
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
842
843static void armv7pmu_start(struct arm_pmu *cpu_pmu)
844{
845 unsigned long flags;
846 struct pmu_hw_events *events = cpu_pmu->get_hw_events();
847
848 raw_spin_lock_irqsave(&events->pmu_lock, flags);
849
850 armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
851 raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
852}
853
854static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
855{
856 unsigned long flags;
857 struct pmu_hw_events *events = cpu_pmu->get_hw_events();
858
859 raw_spin_lock_irqsave(&events->pmu_lock, flags);
860
861 armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
862 raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
863}
864
865static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
866 struct perf_event *event)
867{
868 int idx;
869 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
870 struct hw_perf_event *hwc = &event->hw;
871 unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;
872
873
874 if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
875 if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
876 return -EAGAIN;
877
878 return ARMV7_IDX_CYCLE_COUNTER;
879 }
880
881
882
883
884
885 for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
886 if (!test_and_set_bit(idx, cpuc->used_mask))
887 return idx;
888 }
889
890
891 return -EAGAIN;
892}
893
894
895
896
897static int armv7pmu_set_event_filter(struct hw_perf_event *event,
898 struct perf_event_attr *attr)
899{
900 unsigned long config_base = 0;
901
902 if (attr->exclude_idle)
903 return -EPERM;
904 if (attr->exclude_user)
905 config_base |= ARMV7_EXCLUDE_USER;
906 if (attr->exclude_kernel)
907 config_base |= ARMV7_EXCLUDE_PL1;
908 if (!attr->exclude_hv)
909 config_base |= ARMV7_INCLUDE_HYP;
910
911
912
913
914
915 event->config_base = config_base;
916
917 return 0;
918}
919
920static void armv7pmu_reset(void *info)
921{
922 struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
923 u32 idx, nb_cnt = cpu_pmu->num_events;
924
925
926 for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
927 armv7_pmnc_disable_counter(idx);
928 armv7_pmnc_disable_intens(idx);
929 }
930
931
932 armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
933}
934
/* Map a generic perf event to a Cortex-A8 hardware event number. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
940
/* Map a generic perf event to a Cortex-A9 hardware event number. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
946
/* Map a generic perf event to a Cortex-A5 hardware event number. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
952
/* Map a generic perf event to a Cortex-A15 hardware event number. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
958
/* Map a generic perf event to a Cortex-A7 hardware event number. */
static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}
964
/* Map a generic perf event to a Cortex-A12 hardware event number. */
static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}
970
/*
 * Map a generic perf event to a Krait event number. Krait events are
 * wider than 8 bits, hence the 0xFFFFF mask.
 */
static int krait_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map,
				&krait_perf_cache_map, 0xFFFFF);
}
976
/*
 * As krait_map_event(), but using the map without the
 * branch-instructions event.
 */
static int krait_map_event_no_branch(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map_no_branch,
				&krait_perf_cache_map, 0xFFFFF);
}
982
983static void armv7pmu_init(struct arm_pmu *cpu_pmu)
984{
985 cpu_pmu->handle_irq = armv7pmu_handle_irq;
986 cpu_pmu->enable = armv7pmu_enable_event;
987 cpu_pmu->disable = armv7pmu_disable_event;
988 cpu_pmu->read_counter = armv7pmu_read_counter;
989 cpu_pmu->write_counter = armv7pmu_write_counter;
990 cpu_pmu->get_event_idx = armv7pmu_get_event_idx;
991 cpu_pmu->start = armv7pmu_start;
992 cpu_pmu->stop = armv7pmu_stop;
993 cpu_pmu->reset = armv7pmu_reset;
994 cpu_pmu->max_period = (1LLU << 32) - 1;
995};
996
/*
 * Number of usable counters: the PMNC.N field gives the number of
 * programmable event counters; add one for the dedicated cycle counter.
 */
static u32 armv7_read_num_pmnc_events(void)
{
	u32 nb_cnt;

	/* Read the nb of CNTx counters supported from PMNC */
	nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter */
	return nb_cnt + 1;
}
1007
1008static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
1009{
1010 armv7pmu_init(cpu_pmu);
1011 cpu_pmu->name = "armv7_cortex_a8";
1012 cpu_pmu->map_event = armv7_a8_map_event;
1013 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1014 return 0;
1015}
1016
1017static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
1018{
1019 armv7pmu_init(cpu_pmu);
1020 cpu_pmu->name = "armv7_cortex_a9";
1021 cpu_pmu->map_event = armv7_a9_map_event;
1022 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1023 return 0;
1024}
1025
1026static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
1027{
1028 armv7pmu_init(cpu_pmu);
1029 cpu_pmu->name = "armv7_cortex_a5";
1030 cpu_pmu->map_event = armv7_a5_map_event;
1031 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1032 return 0;
1033}
1034
1035static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
1036{
1037 armv7pmu_init(cpu_pmu);
1038 cpu_pmu->name = "armv7_cortex_a15";
1039 cpu_pmu->map_event = armv7_a15_map_event;
1040 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1041 cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1042 return 0;
1043}
1044
1045static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
1046{
1047 armv7pmu_init(cpu_pmu);
1048 cpu_pmu->name = "armv7_cortex_a7";
1049 cpu_pmu->map_event = armv7_a7_map_event;
1050 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1051 cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1052 return 0;
1053}
1054
1055static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
1056{
1057 armv7pmu_init(cpu_pmu);
1058 cpu_pmu->name = "armv7_cortex_a12";
1059 cpu_pmu->map_event = armv7_a12_map_event;
1060 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1061 cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1062 return 0;
1063}
1064
1065static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
1066{
1067 armv7_a12_pmu_init(cpu_pmu);
1068 cpu_pmu->name = "armv7_cortex_a17";
1069 return 0;
1070}
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105#define KRAIT_EVENT (1 << 16)
1106#define VENUM_EVENT (2 << 16)
1107#define KRAIT_EVENT_MASK (KRAIT_EVENT | VENUM_EVENT)
1108#define PMRESRn_EN BIT(31)
1109
/*
 * Read Krait region event selection register PMRESRn (n = 0..2).
 * Each register has its own cp15 encoding (CRm=c15, opc2=n), so the
 * register number must be a compile-time constant — hence the switch.
 */
static u32 krait_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
		break;
	default:
		BUG(); /* callers validate region <= 2 */
	}

	return val;
}
1130
/*
 * Write Krait region event selection register PMRESRn (n = 0..2).
 * Counterpart of krait_read_pmresrn(); same constant-encoding switch.
 */
static void krait_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
		break;
	default:
		BUG(); /* callers validate region <= 2 */
	}
}
1147
/*
 * Read the Venum (VFP/NEON unit) event selection register VPMRESR0.
 * Lives in coprocessor 10 space, so CP10/CP11 access and the FPU must
 * be enabled first — see krait_pre_vpmresr0().
 */
static u32 krait_read_vpmresr0(void)
{
	u32 val;
	asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
	return val;
}
1154
/*
 * Write VPMRESR0.  Caller must have enabled CP10/CP11 access and the
 * FPU via krait_pre_vpmresr0().
 */
static void krait_write_vpmresr0(u32 val)
{
	asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
}
1159
/*
 * Prepare for a VPMRESR0 access: grant SVC-mode access to CP10/CP11
 * and enable the FPU (FPEXC.EN).  The previous coprocessor-access and
 * FPEXC values are returned through @venum_orig_val and @fp_orig_val
 * so krait_post_vpmresr0() can restore them.  This touches per-CPU
 * banked state, so preemption must already be disabled.
 */
static void krait_pre_vpmresr0(u32 *venum_orig_val, u32 *fp_orig_val)
{
	u32 venum_new_val;
	u32 fp_new_val;

	BUG_ON(preemptible());

	/* Allow PL1 access to coprocessors 10 and 11 (VFP/NEON) */
	*venum_orig_val = get_copro_access();
	venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
	set_copro_access(venum_new_val);

	/* Turn the FPU on (copro access must be granted first) */
	*fp_orig_val = fmrx(FPEXC);
	fp_new_val = *fp_orig_val | FPEXC_EN;
	fmxr(FPEXC, fp_new_val);
}
1176
/*
 * Undo krait_pre_vpmresr0(): restore FPEXC and the coprocessor-access
 * register, in the reverse order of how they were changed.  Preemption
 * must still be disabled.
 */
static void krait_post_vpmresr0(u32 venum_orig_val, u32 fp_orig_val)
{
	BUG_ON(preemptible());

	fmxr(FPEXC, fp_orig_val);
	/* Make sure the FPEXC write has taken effect before dropping access */
	isb();

	set_copro_access(venum_orig_val);
}
1186
1187static u32 krait_get_pmresrn_event(unsigned int region)
1188{
1189 static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
1190 KRAIT_PMRESR1_GROUP0,
1191 KRAIT_PMRESR2_GROUP0 };
1192 return pmresrn_table[region];
1193}
1194
/*
 * Program counter @idx to count the Krait/Venum event encoded in
 * @config_base.  Layout: prefix[17:16] (KRAIT/VENUM), region[15:12],
 * code[11:4], group[3:0].  The PMNx evtsel selects the region/group;
 * the event code itself is written into the matching byte of the
 * (V)PMRESR register.
 */
static void krait_evt_setup(int idx, u32 config_base)
{
	u32 val;
	u32 mask;
	u32 vval, fval;
	unsigned int region;
	unsigned int group;
	unsigned int code;
	unsigned int group_shift;
	bool venum_event;

	venum_event = !!(config_base & VENUM_EVENT);
	region = (config_base >> 12) & 0xf;
	code = (config_base >> 4) & 0xff;
	group = (config_base >> 0) & 0xf;

	/* Each group owns one byte of the PMRESR register */
	group_shift = group * 8;
	mask = 0xff << group_shift;

	/* Configure evtsel for the region and group */
	if (venum_event)
		val = KRAIT_VPMRESR0_GROUP0;
	else
		val = krait_get_pmresrn_event(region);
	val += group;
	/* Mix in mode-exclusion bits */
	val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
	armv7_pmnc_write_evtsel(idx, val);

	/*
	 * Zero an implementation-defined cp15 register (c9, c15, 0)
	 * before programming the PMRESR.  NOTE(review): exact register
	 * semantics are not visible here — confirm against the Krait
	 * documentation.
	 */
	asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));

	if (venum_event) {
		/* VPMRESR0 needs CP10/11 + FPU access around the update */
		krait_pre_vpmresr0(&vval, &fval);
		val = krait_read_vpmresr0();
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		krait_write_vpmresr0(val);
		krait_post_vpmresr0(vval, fval);
	} else {
		/* Insert the event code into this group's byte and enable */
		val = krait_read_pmresrn(region);
		val &= ~mask;
		val |= code << group_shift;
		val |= PMRESRn_EN;
		krait_write_pmresrn(region, val);
	}
}
1242
1243static u32 krait_clear_pmresrn_group(u32 val, int group)
1244{
1245 u32 mask;
1246 int group_shift;
1247
1248 group_shift = group * 8;
1249 mask = 0xff << group_shift;
1250 val &= ~mask;
1251
1252
1253 if (val & ~PMRESRn_EN)
1254 return val |= PMRESRn_EN;
1255
1256 return 0;
1257}
1258
/*
 * Undo krait_evt_setup(): clear the group's event code from the
 * (V)PMRESR register selected by @config_base.
 */
static void krait_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region;
	unsigned int group;
	bool venum_event;

	venum_event = !!(config_base & VENUM_EVENT);
	region = (config_base >> 12) & 0xf;
	group = (config_base >> 0) & 0xf;

	if (venum_event) {
		/* VPMRESR0 needs CP10/11 + FPU access around the update */
		krait_pre_vpmresr0(&vval, &fval);
		val = krait_read_vpmresr0();
		val = krait_clear_pmresrn_group(val, group);
		krait_write_vpmresr0(val);
		krait_post_vpmresr0(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val = krait_clear_pmresrn_group(val, group);
		krait_write_pmresrn(region, val);
	}
}
1283
/*
 * Stop counting @event: disable its counter and overflow interrupt,
 * and release the Krait PMRESR group it was using (if any).
 */
static void krait_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = cpu_pmu->get_hw_events();

	/* Disable counter and interrupt under the PMU lock */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear the PMRESR code if this was a Krait/Venum event
	 * (generic ARMv7 events don't use the PMRESR registers).
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1309
/*
 * Program counter @idx for @event and start it counting with the
 * overflow interrupt enabled.
 */
static void krait_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = cpu_pmu->get_hw_events();

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter while it is being reprogrammed */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Krait/Venum events need the full PMRESR programming sequence;
	 * plain ARMv7 events just write the evtsel register.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_evt_setup(idx, hwc->config_base);
	else
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1345
/*
 * Per-CPU PMU reset: run the common ARMv7 reset, then clear all three
 * Krait region registers and the Venum register.
 */
static void krait_pmu_reset(void *info)
{
	u32 vval, fval;

	armv7pmu_reset(info);

	/* Clear all PMRESR registers */
	krait_write_pmresrn(0, 0);
	krait_write_pmresrn(1, 0);
	krait_write_pmresrn(2, 0);

	/* VPMRESR0 needs CP10/11 + FPU access around the write */
	krait_pre_vpmresr0(&vval, &fval);
	krait_write_vpmresr0(0);
	krait_post_vpmresr0(vval, fval);
}
1361
1362static int krait_event_to_bit(struct perf_event *event, unsigned int region,
1363 unsigned int group)
1364{
1365 int bit;
1366 struct hw_perf_event *hwc = &event->hw;
1367 struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
1368
1369 if (hwc->config_base & VENUM_EVENT)
1370 bit = KRAIT_VPMRESR0_GROUP0;
1371 else
1372 bit = krait_get_pmresrn_event(region);
1373 bit -= krait_get_pmresrn_event(0);
1374 bit += group;
1375
1376
1377
1378
1379 bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;
1380
1381 return bit;
1382}
1383
1384
1385
1386
1387
/*
 * Counter allocation with Krait "column exclusion": two events may not
 * use the same group within one PMRESR register.  A used_mask bit is
 * reserved for the region/group before asking the generic ARMv7
 * allocator for a counter, and released again if allocation fails.
 */
static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				   struct perf_event *event)
{
	int idx;
	int bit = -1;
	unsigned int prefix;
	unsigned int region;
	unsigned int code;
	unsigned int group;
	bool krait_event;
	struct hw_perf_event *hwc = &event->hw;

	/* Decode config_base: region[15:12], code[11:4], group[3:0] */
	region = (hwc->config_base >> 12) & 0xf;
	code = (hwc->config_base >> 4) & 0xff;
	group = (hwc->config_base >> 0) & 0xf;
	krait_event = !!(hwc->config_base & KRAIT_EVENT_MASK);

	if (krait_event) {
		/* Reject out-of-range region/group encodings */
		if (group > 3 || region > 2)
			return -EINVAL;
		prefix = hwc->config_base & KRAIT_EVENT_MASK;
		if (prefix != KRAIT_EVENT && prefix != VENUM_EVENT)
			return -EINVAL;
		/* Venum event codes must fit in bits [4:0] */
		if (prefix == VENUM_EVENT && (code & 0xe0))
			return -EINVAL;

		/* Claim the region/group; -EAGAIN if already in use */
		bit = krait_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	/* Roll back the group reservation if no counter was free */
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1426
1427static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
1428 struct perf_event *event)
1429{
1430 int bit;
1431 struct hw_perf_event *hwc = &event->hw;
1432 unsigned int region;
1433 unsigned int group;
1434 bool krait_event;
1435
1436 region = (hwc->config_base >> 12) & 0xf;
1437 group = (hwc->config_base >> 0) & 0xf;
1438 krait_event = !!(hwc->config_base & KRAIT_EVENT_MASK);
1439
1440 if (krait_event) {
1441 bit = krait_event_to_bit(event, region, group);
1442 clear_bit(bit, cpuc->used_mask);
1443 }
1444}
1445
1446static int krait_pmu_init(struct arm_pmu *cpu_pmu)
1447{
1448 armv7pmu_init(cpu_pmu);
1449 cpu_pmu->name = "armv7_krait";
1450
1451 if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
1452 "qcom,no-pc-write"))
1453 cpu_pmu->map_event = krait_map_event_no_branch;
1454 else
1455 cpu_pmu->map_event = krait_map_event;
1456 cpu_pmu->num_events = armv7_read_num_pmnc_events();
1457 cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
1458 cpu_pmu->reset = krait_pmu_reset;
1459 cpu_pmu->enable = krait_pmu_enable_event;
1460 cpu_pmu->disable = krait_pmu_disable_event;
1461 cpu_pmu->get_event_idx = krait_pmu_get_event_idx;
1462 cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
1463 return 0;
1464}
1465#else
/*
 * !CONFIG_CPU_V7 stubs: every ARMv7/Krait PMU probe simply reports
 * that no such device exists.
 */
static inline int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}

static inline int krait_pmu_init(struct arm_pmu *cpu_pmu)
{
	return -ENODEV;
}
1505#endif
1506