/* linux/arch/arm/kernel/perf_event_v7.c */
/*
 * ARMv7 Cortex-A8 and Cortex-A9 Performance Events handling code.
 *
 * ARMv7 support: Jean Pihet <jpihet@mvista.com>
 * 2010 (c) MontaVista Software, LLC.
 *
 * Copied from ARMv6 code, with the low level code inspired
 *  by the ARMv7 Oprofile code.
 *
 * Cortex-A8 has up to 4 configurable performance counters and
 *  a single cycle counter.
 * Cortex-A9 has up to 31 configurable performance counters and
 *  a single cycle counter.
 *
 * All counters can be enabled/disabled and IRQ masked separately. The cycle
 *  counter and all 4 performance counters together can be reset separately.
 */
  18
#ifdef CONFIG_CPU_V7

/*
 * Forward declaration; as a static object it must be defined later in this
 * translation unit.  Declared early so the code below can reference it.
 */
static struct arm_pmu armv7pmu;
  22
/*
 * Common ARMv7 event types
 *
 * Note: An implementation may not be able to count all of these events
 * but the encodings are considered to be `reserved' in the case that
 * they are not available.
 *
 * The numeric values are the event numbers that get programmed into the
 * PMU event selection registers; they are shared by all the per-CPU
 * mapping tables below.
 */
enum armv7_perf_types {
	ARMV7_PERFCTR_PMNC_SW_INCR		= 0x00,
	ARMV7_PERFCTR_IFETCH_MISS		= 0x01,
	ARMV7_PERFCTR_ITLB_MISS			= 0x02,
	ARMV7_PERFCTR_DCACHE_REFILL		= 0x03,	/* L1 */
	ARMV7_PERFCTR_DCACHE_ACCESS		= 0x04,	/* L1 */
	ARMV7_PERFCTR_DTLB_REFILL		= 0x05,
	ARMV7_PERFCTR_DREAD			= 0x06,
	ARMV7_PERFCTR_DWRITE			= 0x07,
	ARMV7_PERFCTR_INSTR_EXECUTED		= 0x08,
	ARMV7_PERFCTR_EXC_TAKEN			= 0x09,
	ARMV7_PERFCTR_EXC_EXECUTED		= 0x0A,
	ARMV7_PERFCTR_CID_WRITE			= 0x0B,
	/* ARMV7_PERFCTR_PC_WRITE is equivalent to HW_BRANCH_INSTRUCTIONS.
	 * It counts:
	 *  - all branch instructions,
	 *  - instructions that explicitly write the PC,
	 *  - exception generating instructions.
	 */
	ARMV7_PERFCTR_PC_WRITE			= 0x0C,
	ARMV7_PERFCTR_PC_IMM_BRANCH		= 0x0D,
	ARMV7_PERFCTR_PC_PROC_RETURN		= 0x0E,
	ARMV7_PERFCTR_UNALIGNED_ACCESS		= 0x0F,

	/* These events are defined by the PMUv2 supplement (ARM DDI 0457A). */
	ARMV7_PERFCTR_PC_BRANCH_MIS_PRED	= 0x10,
	ARMV7_PERFCTR_CLOCK_CYCLES		= 0x11,
	ARMV7_PERFCTR_PC_BRANCH_PRED		= 0x12,
	ARMV7_PERFCTR_MEM_ACCESS		= 0x13,
	ARMV7_PERFCTR_L1_ICACHE_ACCESS		= 0x14,
	ARMV7_PERFCTR_L1_DCACHE_WB		= 0x15,
	ARMV7_PERFCTR_L2_DCACHE_ACCESS		= 0x16,
	ARMV7_PERFCTR_L2_DCACHE_REFILL		= 0x17,
	ARMV7_PERFCTR_L2_DCACHE_WB		= 0x18,
	ARMV7_PERFCTR_BUS_ACCESS		= 0x19,
	ARMV7_PERFCTR_MEMORY_ERROR		= 0x1A,
	ARMV7_PERFCTR_INSTR_SPEC		= 0x1B,
	ARMV7_PERFCTR_TTBR_WRITE		= 0x1C,
	ARMV7_PERFCTR_BUS_CYCLES		= 0x1D,

	/*
	 * 0xFF is not a real event number: it is the software convention
	 * used by this file to select the dedicated cycle counter.
	 */
	ARMV7_PERFCTR_CPU_CYCLES		= 0xFF
};
  72
/*
 * ARMv7 Cortex-A8 specific event types
 *
 * All values are at 0x40 and above, i.e. outside the common event space
 * defined in enum armv7_perf_types.
 */
enum armv7_a8_perf_types {
	ARMV7_PERFCTR_WRITE_BUFFER_FULL		= 0x40,
	ARMV7_PERFCTR_L2_STORE_MERGED		= 0x41,
	ARMV7_PERFCTR_L2_STORE_BUFF		= 0x42,
	ARMV7_PERFCTR_L2_ACCESS			= 0x43,
	/* NOTE: "CACH" spelling kept as-is; the name is used by the tables below. */
	ARMV7_PERFCTR_L2_CACH_MISS		= 0x44,
	ARMV7_PERFCTR_AXI_READ_CYCLES		= 0x45,
	ARMV7_PERFCTR_AXI_WRITE_CYCLES		= 0x46,
	ARMV7_PERFCTR_MEMORY_REPLAY		= 0x47,
	ARMV7_PERFCTR_UNALIGNED_ACCESS_REPLAY	= 0x48,
	ARMV7_PERFCTR_L1_DATA_MISS		= 0x49,
	ARMV7_PERFCTR_L1_INST_MISS		= 0x4A,
	ARMV7_PERFCTR_L1_DATA_COLORING		= 0x4B,
	ARMV7_PERFCTR_L1_NEON_DATA		= 0x4C,
	ARMV7_PERFCTR_L1_NEON_CACH_DATA		= 0x4D,
	ARMV7_PERFCTR_L2_NEON			= 0x4E,
	ARMV7_PERFCTR_L2_NEON_HIT		= 0x4F,
	ARMV7_PERFCTR_L1_INST			= 0x50,
	ARMV7_PERFCTR_PC_RETURN_MIS_PRED	= 0x51,
	ARMV7_PERFCTR_PC_BRANCH_FAILED		= 0x52,
	ARMV7_PERFCTR_PC_BRANCH_TAKEN		= 0x53,
	ARMV7_PERFCTR_PC_BRANCH_EXECUTED	= 0x54,
	ARMV7_PERFCTR_OP_EXECUTED		= 0x55,
	ARMV7_PERFCTR_CYCLES_INST_STALL		= 0x56,
	ARMV7_PERFCTR_CYCLES_INST		= 0x57,
	ARMV7_PERFCTR_CYCLES_NEON_DATA_STALL	= 0x58,
	ARMV7_PERFCTR_CYCLES_NEON_INST_STALL	= 0x59,
	ARMV7_PERFCTR_NEON_CYCLES		= 0x5A,

	ARMV7_PERFCTR_PMU0_EVENTS		= 0x70,
	ARMV7_PERFCTR_PMU1_EVENTS		= 0x71,
	ARMV7_PERFCTR_PMU_EVENTS		= 0x72,
};
 107
/*
 * ARMv7 Cortex-A9 specific event types
 *
 * Implementation-specific events in the 0x40+ space; note the gaps in the
 * numbering (reserved/unimplemented encodings on this core).
 */
enum armv7_a9_perf_types {
	ARMV7_PERFCTR_JAVA_HW_BYTECODE_EXEC	= 0x40,
	ARMV7_PERFCTR_JAVA_SW_BYTECODE_EXEC	= 0x41,
	ARMV7_PERFCTR_JAZELLE_BRANCH_EXEC	= 0x42,

	ARMV7_PERFCTR_COHERENT_LINE_MISS	= 0x50,
	ARMV7_PERFCTR_COHERENT_LINE_HIT		= 0x51,

	ARMV7_PERFCTR_ICACHE_DEP_STALL_CYCLES	= 0x60,
	ARMV7_PERFCTR_DCACHE_DEP_STALL_CYCLES	= 0x61,
	ARMV7_PERFCTR_TLB_MISS_DEP_STALL_CYCLES	= 0x62,
	ARMV7_PERFCTR_STREX_EXECUTED_PASSED	= 0x63,
	ARMV7_PERFCTR_STREX_EXECUTED_FAILED	= 0x64,
	ARMV7_PERFCTR_DATA_EVICTION		= 0x65,
	ARMV7_PERFCTR_ISSUE_STAGE_NO_INST	= 0x66,
	ARMV7_PERFCTR_ISSUE_STAGE_EMPTY		= 0x67,
	ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE	= 0x68,

	ARMV7_PERFCTR_PREDICTABLE_FUNCT_RETURNS	= 0x6E,

	ARMV7_PERFCTR_MAIN_UNIT_EXECUTED_INST	= 0x70,
	ARMV7_PERFCTR_SECOND_UNIT_EXECUTED_INST	= 0x71,
	ARMV7_PERFCTR_LD_ST_UNIT_EXECUTED_INST	= 0x72,
	ARMV7_PERFCTR_FP_EXECUTED_INST		= 0x73,
	ARMV7_PERFCTR_NEON_EXECUTED_INST	= 0x74,

	ARMV7_PERFCTR_PLD_FULL_DEP_STALL_CYCLES	= 0x80,
	ARMV7_PERFCTR_DATA_WR_DEP_STALL_CYCLES	= 0x81,
	ARMV7_PERFCTR_ITLB_MISS_DEP_STALL_CYCLES	= 0x82,
	ARMV7_PERFCTR_DTLB_MISS_DEP_STALL_CYCLES	= 0x83,
	ARMV7_PERFCTR_MICRO_ITLB_MISS_DEP_STALL_CYCLES	= 0x84,
	ARMV7_PERFCTR_MICRO_DTLB_MISS_DEP_STALL_CYCLES	= 0x85,
	ARMV7_PERFCTR_DMB_DEP_STALL_CYCLES	= 0x86,

	ARMV7_PERFCTR_INTGR_CLK_ENABLED_CYCLES	= 0x8A,
	ARMV7_PERFCTR_DATA_ENGINE_CLK_EN_CYCLES	= 0x8B,

	ARMV7_PERFCTR_ISB_INST			= 0x90,
	ARMV7_PERFCTR_DSB_INST			= 0x91,
	ARMV7_PERFCTR_DMB_INST			= 0x92,
	ARMV7_PERFCTR_EXT_INTERRUPTS		= 0x93,

	/* PLE = preload engine events. */
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_COMPLETED	= 0xA0,
	ARMV7_PERFCTR_PLE_CACHE_LINE_RQST_SKIPPED	= 0xA1,
	ARMV7_PERFCTR_PLE_FIFO_FLUSH		= 0xA2,
	ARMV7_PERFCTR_PLE_RQST_COMPLETED	= 0xA3,
	ARMV7_PERFCTR_PLE_FIFO_OVERFLOW		= 0xA4,
	ARMV7_PERFCTR_PLE_RQST_PROG		= 0xA5
};
 158
/*
 * ARMv7 Cortex-A5 specific event types
 *
 * Sparse set of implementation-specific events; the 0xc2/0xc3 prefetch
 * linefill events are used by the A5 cache map below for OP_PREFETCH.
 */
enum armv7_a5_perf_types {
	ARMV7_PERFCTR_IRQ_TAKEN			= 0x86,
	ARMV7_PERFCTR_FIQ_TAKEN			= 0x87,

	ARMV7_PERFCTR_EXT_MEM_RQST		= 0xc0,
	ARMV7_PERFCTR_NC_EXT_MEM_RQST		= 0xc1,
	ARMV7_PERFCTR_PREFETCH_LINEFILL		= 0xc2,
	ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP	= 0xc3,
	ARMV7_PERFCTR_ENTER_READ_ALLOC		= 0xc4,
	ARMV7_PERFCTR_READ_ALLOC		= 0xc5,

	ARMV7_PERFCTR_STALL_SB_FULL		= 0xc9,
};
 173
/*
 * ARMv7 Cortex-A15 specific event types
 *
 * Unlike the common L1/L2 events, the A15 provides separate read and
 * write variants of the cache/TLB access and refill events, which the
 * A15 cache map below exploits for OP_READ vs OP_WRITE.
 */
enum armv7_a15_perf_types {
	ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS	= 0x40,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS	= 0x41,
	ARMV7_PERFCTR_L1_DCACHE_READ_REFILL	= 0x42,
	ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL	= 0x43,

	ARMV7_PERFCTR_L1_DTLB_READ_REFILL	= 0x4C,
	ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL	= 0x4D,

	ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS	= 0x50,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS	= 0x51,
	ARMV7_PERFCTR_L2_DCACHE_READ_REFILL	= 0x52,
	ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL	= 0x53,

	ARMV7_PERFCTR_SPEC_PC_WRITE		= 0x76,
};
 191
 192/*
 193 * Cortex-A8 HW events mapping
 194 *
 195 * The hardware events that we support. We do support cache operations but
 196 * we have harvard caches and no way to combine instruction and data
 197 * accesses/misses in hardware.
 198 */
 199static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
 200        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
 201        [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
 202        [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
 203        [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
 204        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
 205        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 206        [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
 207};
 208
/*
 * Generic perf cache-event map for the Cortex-A8: indexed by
 * (cache type, operation, result) and yielding the A8 event number, or
 * CACHE_OP_UNSUPPORTED for combinations the PMU cannot count.
 */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/* Uses the A8-specific L1 instruction-side events. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L1_INST,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L1_INST_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		/* Last-level cache is backed by the A8-specific L2 events. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_L2_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_L2_CACH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		/* Only refills (misses) are countable, not raw accesses. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		/* Branch events: PC writes as accesses, mispredicts as misses. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		/* No node (remote-memory) events on this core. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
 319
 320/*
 321 * Cortex-A9 HW events mapping
 322 */
 323static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
 324        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
 325        [PERF_COUNT_HW_INSTRUCTIONS]        =
 326                                        ARMV7_PERFCTR_INST_OUT_OF_RENAME_STAGE,
 327        [PERF_COUNT_HW_CACHE_REFERENCES]    = ARMV7_PERFCTR_DCACHE_ACCESS,
 328        [PERF_COUNT_HW_CACHE_MISSES]        = ARMV7_PERFCTR_DCACHE_REFILL,
 329        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
 330        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 331        [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_CLOCK_CYCLES,
 332};
 333
/*
 * Generic perf cache-event map for the Cortex-A9: indexed by
 * (cache type, operation, result) and yielding the event number, or
 * CACHE_OP_UNSUPPORTED for combinations the PMU cannot count.
 */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					  [PERF_COUNT_HW_CACHE_OP_MAX]
					  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	[C(L1D)] = {
		/*
		 * The performance counters don't differentiate between read
		 * and write accesses/misses so this isn't strictly correct,
		 * but it's the best we can do. Writes and reads get
		 * combined.
		 */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_DCACHE_ACCESS,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DCACHE_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(L1I)] = {
		/* Only instruction-fetch misses are countable, not accesses. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_IFETCH_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(LL)] = {
		/* No last-level cache events are exposed on this core. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(DTLB)] = {
		/* Only refills (misses) are countable, not raw accesses. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_DTLB_REFILL,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(ITLB)] = {
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= ARMV7_PERFCTR_ITLB_MISS,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(BPU)] = {
		/* Branch events: PC writes as accesses, mispredicts as misses. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= ARMV7_PERFCTR_PC_WRITE,
			[C(RESULT_MISS)]
					= ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
	[C(NODE)] = {
		/* No node (remote-memory) events on this core. */
		[C(OP_READ)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_WRITE)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
		[C(OP_PREFETCH)] = {
			[C(RESULT_ACCESS)]	= CACHE_OP_UNSUPPORTED,
			[C(RESULT_MISS)]	= CACHE_OP_UNSUPPORTED,
		},
	},
};
 444
 445/*
 446 * Cortex-A5 HW events mapping
 447 */
 448static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
 449        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
 450        [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
 451        [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
 452        [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
 453        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
 454        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 455        [PERF_COUNT_HW_BUS_CYCLES]          = HW_OP_UNSUPPORTED,
 456};
 457
 458static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
 459                                        [PERF_COUNT_HW_CACHE_OP_MAX]
 460                                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
 461        [C(L1D)] = {
 462                [C(OP_READ)] = {
 463                        [C(RESULT_ACCESS)]
 464                                        = ARMV7_PERFCTR_DCACHE_ACCESS,
 465                        [C(RESULT_MISS)]
 466                                        = ARMV7_PERFCTR_DCACHE_REFILL,
 467                },
 468                [C(OP_WRITE)] = {
 469                        [C(RESULT_ACCESS)]
 470                                        = ARMV7_PERFCTR_DCACHE_ACCESS,
 471                        [C(RESULT_MISS)]
 472                                        = ARMV7_PERFCTR_DCACHE_REFILL,
 473                },
 474                [C(OP_PREFETCH)] = {
 475                        [C(RESULT_ACCESS)]
 476                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL,
 477                        [C(RESULT_MISS)]
 478                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
 479                },
 480        },
 481        [C(L1I)] = {
 482                [C(OP_READ)] = {
 483                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
 484                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
 485                },
 486                [C(OP_WRITE)] = {
 487                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
 488                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
 489                },
 490                /*
 491                 * The prefetch counters don't differentiate between the I
 492                 * side and the D side.
 493                 */
 494                [C(OP_PREFETCH)] = {
 495                        [C(RESULT_ACCESS)]
 496                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL,
 497                        [C(RESULT_MISS)]
 498                                        = ARMV7_PERFCTR_PREFETCH_LINEFILL_DROP,
 499                },
 500        },
 501        [C(LL)] = {
 502                [C(OP_READ)] = {
 503                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 504                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 505                },
 506                [C(OP_WRITE)] = {
 507                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 508                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 509                },
 510                [C(OP_PREFETCH)] = {
 511                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 512                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 513                },
 514        },
 515        [C(DTLB)] = {
 516                [C(OP_READ)] = {
 517                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 518                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
 519                },
 520                [C(OP_WRITE)] = {
 521                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 522                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_DTLB_REFILL,
 523                },
 524                [C(OP_PREFETCH)] = {
 525                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 526                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 527                },
 528        },
 529        [C(ITLB)] = {
 530                [C(OP_READ)] = {
 531                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 532                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
 533                },
 534                [C(OP_WRITE)] = {
 535                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 536                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
 537                },
 538                [C(OP_PREFETCH)] = {
 539                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 540                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 541                },
 542        },
 543        [C(BPU)] = {
 544                [C(OP_READ)] = {
 545                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
 546                        [C(RESULT_MISS)]
 547                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 548                },
 549                [C(OP_WRITE)] = {
 550                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
 551                        [C(RESULT_MISS)]
 552                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 553                },
 554                [C(OP_PREFETCH)] = {
 555                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 556                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 557                },
 558        },
 559};
 560
 561/*
 562 * Cortex-A15 HW events mapping
 563 */
 564static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
 565        [PERF_COUNT_HW_CPU_CYCLES]          = ARMV7_PERFCTR_CPU_CYCLES,
 566        [PERF_COUNT_HW_INSTRUCTIONS]        = ARMV7_PERFCTR_INSTR_EXECUTED,
 567        [PERF_COUNT_HW_CACHE_REFERENCES]    = HW_OP_UNSUPPORTED,
 568        [PERF_COUNT_HW_CACHE_MISSES]        = HW_OP_UNSUPPORTED,
 569        [PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_SPEC_PC_WRITE,
 570        [PERF_COUNT_HW_BRANCH_MISSES]       = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 571        [PERF_COUNT_HW_BUS_CYCLES]          = ARMV7_PERFCTR_BUS_CYCLES,
 572};
 573
 574static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
 575                                        [PERF_COUNT_HW_CACHE_OP_MAX]
 576                                        [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
 577        [C(L1D)] = {
 578                [C(OP_READ)] = {
 579                        [C(RESULT_ACCESS)]
 580                                        = ARMV7_PERFCTR_L1_DCACHE_READ_ACCESS,
 581                        [C(RESULT_MISS)]
 582                                        = ARMV7_PERFCTR_L1_DCACHE_READ_REFILL,
 583                },
 584                [C(OP_WRITE)] = {
 585                        [C(RESULT_ACCESS)]
 586                                        = ARMV7_PERFCTR_L1_DCACHE_WRITE_ACCESS,
 587                        [C(RESULT_MISS)]
 588                                        = ARMV7_PERFCTR_L1_DCACHE_WRITE_REFILL,
 589                },
 590                [C(OP_PREFETCH)] = {
 591                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 592                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 593                },
 594        },
 595        [C(L1I)] = {
 596                /*
 597                 * Not all performance counters differentiate between read
 598                 * and write accesses/misses so we're not always strictly
 599                 * correct, but it's the best we can do. Writes and reads get
 600                 * combined in these cases.
 601                 */
 602                [C(OP_READ)] = {
 603                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
 604                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
 605                },
 606                [C(OP_WRITE)] = {
 607                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
 608                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_IFETCH_MISS,
 609                },
 610                [C(OP_PREFETCH)] = {
 611                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 612                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 613                },
 614        },
 615        [C(LL)] = {
 616                [C(OP_READ)] = {
 617                        [C(RESULT_ACCESS)]
 618                                        = ARMV7_PERFCTR_L2_DCACHE_READ_ACCESS,
 619                        [C(RESULT_MISS)]
 620                                        = ARMV7_PERFCTR_L2_DCACHE_READ_REFILL,
 621                },
 622                [C(OP_WRITE)] = {
 623                        [C(RESULT_ACCESS)]
 624                                        = ARMV7_PERFCTR_L2_DCACHE_WRITE_ACCESS,
 625                        [C(RESULT_MISS)]
 626                                        = ARMV7_PERFCTR_L2_DCACHE_WRITE_REFILL,
 627                },
 628                [C(OP_PREFETCH)] = {
 629                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 630                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 631                },
 632        },
 633        [C(DTLB)] = {
 634                [C(OP_READ)] = {
 635                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 636                        [C(RESULT_MISS)]
 637                                        = ARMV7_PERFCTR_L1_DTLB_READ_REFILL,
 638                },
 639                [C(OP_WRITE)] = {
 640                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 641                        [C(RESULT_MISS)]
 642                                        = ARMV7_PERFCTR_L1_DTLB_WRITE_REFILL,
 643                },
 644                [C(OP_PREFETCH)] = {
 645                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 646                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 647                },
 648        },
 649        [C(ITLB)] = {
 650                [C(OP_READ)] = {
 651                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 652                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
 653                },
 654                [C(OP_WRITE)] = {
 655                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 656                        [C(RESULT_MISS)]        = ARMV7_PERFCTR_ITLB_MISS,
 657                },
 658                [C(OP_PREFETCH)] = {
 659                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 660                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 661                },
 662        },
 663        [C(BPU)] = {
 664                [C(OP_READ)] = {
 665                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
 666                        [C(RESULT_MISS)]
 667                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 668                },
 669                [C(OP_WRITE)] = {
 670                        [C(RESULT_ACCESS)]      = ARMV7_PERFCTR_PC_BRANCH_PRED,
 671                        [C(RESULT_MISS)]
 672                                        = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
 673                },
 674                [C(OP_PREFETCH)] = {
 675                        [C(RESULT_ACCESS)]      = CACHE_OP_UNSUPPORTED,
 676                        [C(RESULT_MISS)]        = CACHE_OP_UNSUPPORTED,
 677                },
 678        },
 679};
 680
 681/*
 682 * Perf Events' indices
 683 */
 684#define ARMV7_IDX_CYCLE_COUNTER 0
 685#define ARMV7_IDX_COUNTER0      1
 686#define ARMV7_IDX_COUNTER_LAST  (ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
 687
 688#define ARMV7_MAX_COUNTERS      32
 689#define ARMV7_COUNTER_MASK      (ARMV7_MAX_COUNTERS - 1)
 690
 691/*
 692 * ARMv7 low level PMNC access
 693 */
 694
 695/*
 696 * Perf Event to low level counters mapping
 697 */
 698#define ARMV7_IDX_TO_COUNTER(x) \
 699        (((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)
 700
 701/*
 702 * Per-CPU PMNC: config reg
 703 */
 704#define ARMV7_PMNC_E            (1 << 0) /* Enable all counters */
 705#define ARMV7_PMNC_P            (1 << 1) /* Reset all counters */
 706#define ARMV7_PMNC_C            (1 << 2) /* Cycle counter reset */
 707#define ARMV7_PMNC_D            (1 << 3) /* CCNT counts every 64th cpu cycle */
 708#define ARMV7_PMNC_X            (1 << 4) /* Export to ETM */
 709#define ARMV7_PMNC_DP           (1 << 5) /* Disable CCNT if non-invasive debug*/
 710#define ARMV7_PMNC_N_SHIFT      11       /* Number of counters supported */
 711#define ARMV7_PMNC_N_MASK       0x1f
 712#define ARMV7_PMNC_MASK         0x3f     /* Mask for writable bits */
 713
 714/*
 715 * FLAG: counters overflow flag status reg
 716 */
 717#define ARMV7_FLAG_MASK         0xffffffff      /* Mask for writable bits */
 718#define ARMV7_OVERFLOWED_MASK   ARMV7_FLAG_MASK
 719
 720/*
 721 * PMXEVTYPER: Event selection reg
 722 */
 723#define ARMV7_EVTYPE_MASK       0xc00000ff      /* Mask for writable bits */
 724#define ARMV7_EVTYPE_EVENT      0xff            /* Mask for EVENT bits */
 725
 726/*
 727 * Event filters for PMUv2
 728 */
 729#define ARMV7_EXCLUDE_PL1       (1 << 31)
 730#define ARMV7_EXCLUDE_USER      (1 << 30)
 731#define ARMV7_INCLUDE_HYP       (1 << 27)
 732
 733static inline u32 armv7_pmnc_read(void)
 734{
 735        u32 val;
 736        asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
 737        return val;
 738}
 739
/*
 * Write @val to the PMNC control register, restricted to the writable
 * bits (ARMV7_PMNC_MASK). The isb() before the write ensures all prior
 * instructions have completed before the control change takes effect.
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
 746
 747static inline int armv7_pmnc_has_overflowed(u32 pmnc)
 748{
 749        return pmnc & ARMV7_OVERFLOWED_MASK;
 750}
 751
 752static inline int armv7_pmnc_counter_valid(int idx)
 753{
 754        return idx >= ARMV7_IDX_CYCLE_COUNTER && idx <= ARMV7_IDX_COUNTER_LAST;
 755}
 756
 757static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
 758{
 759        int ret = 0;
 760        u32 counter;
 761
 762        if (!armv7_pmnc_counter_valid(idx)) {
 763                pr_err("CPU%u checking wrong counter %d overflow status\n",
 764                        smp_processor_id(), idx);
 765        } else {
 766                counter = ARMV7_IDX_TO_COUNTER(idx);
 767                ret = pmnc & BIT(counter);
 768        }
 769
 770        return ret;
 771}
 772
/*
 * Select counter @idx as the target of subsequent indirect counter
 * accesses (the c9, c13 reads/writes below). Returns @idx on success
 * or -EINVAL for an out-of-range index. The isb() guarantees the
 * selection has taken effect before any following counter access.
 */
static inline int armv7_pmnc_select_counter(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u selecting wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	/* Convert the perf index to the hardware counter number. */
	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();

	return idx;
}
 789
/*
 * Read the current value of counter @idx. The cycle counter has its
 * own register; event counters must be selected first and are then
 * read indirectly. Returns 0 (after logging an error) for an invalid
 * index, or if selecting the counter failed.
 */
static inline u32 armv7pmu_read_counter(int idx)
{
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));

	return value;
}
 804
/*
 * Write @value to counter @idx. Mirrors armv7pmu_read_counter(): the
 * cycle counter is written directly, event counters are selected and
 * then written indirectly. Invalid indices are logged and ignored.
 */
static inline void armv7pmu_write_counter(int idx, u32 value)
{
	if (!armv7_pmnc_counter_valid(idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV7_IDX_CYCLE_COUNTER)
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" (value));
	else if (armv7_pmnc_select_counter(idx) == idx)
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" (value));
}
 815
/*
 * Program the event type register for counter @idx with @val,
 * restricted to the writable bits (ARMV7_EVTYPE_MASK). Does nothing
 * if the counter could not be selected (invalid index).
 */
static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	if (armv7_pmnc_select_counter(idx) == idx) {
		val &= ARMV7_EVTYPE_MASK;
		asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
	}
}
 823
/*
 * Enable counter @idx by setting its bit in the counter enable set
 * register (write-one-to-set; other counters are unaffected).
 * Returns @idx on success or -EINVAL for an invalid index.
 */
static inline int armv7_pmnc_enable_counter(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u enabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
	return idx;
}
 838
/*
 * Disable counter @idx by setting its bit in the counter enable clear
 * register (write-one-to-clear; other counters are unaffected).
 * Returns @idx on success or -EINVAL for an invalid index.
 */
static inline int armv7_pmnc_disable_counter(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u disabling wrong PMNC counter %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
	return idx;
}
 853
/*
 * Unmask the overflow interrupt for counter @idx via the interrupt
 * enable set register (write-one-to-set). Returns @idx on success or
 * -EINVAL for an invalid index.
 */
static inline int armv7_pmnc_enable_intens(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
	return idx;
}
 868
/*
 * Mask the overflow interrupt for counter @idx via the interrupt
 * enable clear register (write-one-to-clear). Returns @idx on success
 * or -EINVAL for an invalid index.
 */
static inline int armv7_pmnc_disable_intens(int idx)
{
	u32 counter;

	if (!armv7_pmnc_counter_valid(idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return -EINVAL;
	}

	counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	return idx;
}
 883
/*
 * Read the counter overflow flag status register and acknowledge all
 * pending flags by writing them back (write-one-to-clear). Returns
 * the flags that were set at the time of the read.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write to clear flags */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
 897
 898#ifdef DEBUG
/*
 * Dump the PMU register state (control, enables, flags, selection,
 * cycle counter, and every event counter with its event selection) to
 * the kernel log. Debug aid only; compiled in when DEBUG is defined.
 */
static void armv7_pmnc_dump_regs(void)
{
	u32 val;
	unsigned int cnt;

	printk(KERN_INFO "PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	printk(KERN_INFO "PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	printk(KERN_INFO "CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	printk(KERN_INFO "INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	printk(KERN_INFO "FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	printk(KERN_INFO "SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	printk(KERN_INFO "CCNT  =0x%08x\n", val);

	/* Walk each event counter: select it, then read count and evtsel. */
	for (cnt = ARMV7_IDX_COUNTER0; cnt <= ARMV7_IDX_COUNTER_LAST; cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		printk(KERN_INFO "CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		printk(KERN_INFO "CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
 934#endif
 935
 936static void armv7pmu_enable_event(struct hw_perf_event *hwc, int idx)
 937{
 938        unsigned long flags;
 939        struct pmu_hw_events *events = cpu_pmu->get_hw_events();
 940
 941        /*
 942         * Enable counter and interrupt, and set the counter to count
 943         * the event that we're interested in.
 944         */
 945        raw_spin_lock_irqsave(&events->pmu_lock, flags);
 946
 947        /*
 948         * Disable counter
 949         */
 950        armv7_pmnc_disable_counter(idx);
 951
 952        /*
 953         * Set event (if destined for PMNx counters)
 954         * We only need to set the event for the cycle counter if we
 955         * have the ability to perform event filtering.
 956         */
 957        if (armv7pmu.set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
 958                armv7_pmnc_write_evtsel(idx, hwc->config_base);
 959
 960        /*
 961         * Enable interrupt for this counter
 962         */
 963        armv7_pmnc_enable_intens(idx);
 964
 965        /*
 966         * Enable counter
 967         */
 968        armv7_pmnc_enable_counter(idx);
 969
 970        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
 971}
 972
 973static void armv7pmu_disable_event(struct hw_perf_event *hwc, int idx)
 974{
 975        unsigned long flags;
 976        struct pmu_hw_events *events = cpu_pmu->get_hw_events();
 977
 978        /*
 979         * Disable counter and interrupt
 980         */
 981        raw_spin_lock_irqsave(&events->pmu_lock, flags);
 982
 983        /*
 984         * Disable counter
 985         */
 986        armv7_pmnc_disable_counter(idx);
 987
 988        /*
 989         * Disable interrupt for this counter
 990         */
 991        armv7_pmnc_disable_intens(idx);
 992
 993        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
 994}
 995
 996static irqreturn_t armv7pmu_handle_irq(int irq_num, void *dev)
 997{
 998        u32 pmnc;
 999        struct perf_sample_data data;
1000        struct pmu_hw_events *cpuc;
1001        struct pt_regs *regs;
1002        int idx;
1003
1004        /*
1005         * Get and reset the IRQ flags
1006         */
1007        pmnc = armv7_pmnc_getreset_flags();
1008
1009        /*
1010         * Did an overflow occur?
1011         */
1012        if (!armv7_pmnc_has_overflowed(pmnc))
1013                return IRQ_NONE;
1014
1015        /*
1016         * Handle the counter(s) overflow(s)
1017         */
1018        regs = get_irq_regs();
1019
1020        perf_sample_data_init(&data, 0);
1021
1022        cpuc = &__get_cpu_var(cpu_hw_events);
1023        for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
1024                struct perf_event *event = cpuc->events[idx];
1025                struct hw_perf_event *hwc;
1026
1027                /*
1028                 * We have a single interrupt for all counters. Check that
1029                 * each counter has overflowed before we process it.
1030                 */
1031                if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
1032                        continue;
1033
1034                hwc = &event->hw;
1035                armpmu_event_update(event, hwc, idx, 1);
1036                data.period = event->hw.last_period;
1037                if (!armpmu_event_set_period(event, hwc, idx))
1038                        continue;
1039
1040                if (perf_event_overflow(event, &data, regs))
1041                        cpu_pmu->disable(hwc, idx);
1042        }
1043
1044        /*
1045         * Handle the pending perf events.
1046         *
1047         * Note: this call *must* be run with interrupts disabled. For
1048         * platforms that can have the PMU interrupts raised as an NMI, this
1049         * will not work.
1050         */
1051        irq_work_run();
1052
1053        return IRQ_HANDLED;
1054}
1055
1056static void armv7pmu_start(void)
1057{
1058        unsigned long flags;
1059        struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1060
1061        raw_spin_lock_irqsave(&events->pmu_lock, flags);
1062        /* Enable all counters */
1063        armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
1064        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1065}
1066
1067static void armv7pmu_stop(void)
1068{
1069        unsigned long flags;
1070        struct pmu_hw_events *events = cpu_pmu->get_hw_events();
1071
1072        raw_spin_lock_irqsave(&events->pmu_lock, flags);
1073        /* Disable all counters */
1074        armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
1075        raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
1076}
1077
1078static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
1079                                  struct hw_perf_event *event)
1080{
1081        int idx;
1082        unsigned long evtype = event->config_base & ARMV7_EVTYPE_EVENT;
1083
1084        /* Always place a cycle counter into the cycle counter. */
1085        if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
1086                if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
1087                        return -EAGAIN;
1088
1089                return ARMV7_IDX_CYCLE_COUNTER;
1090        }
1091
1092        /*
1093         * For anything other than a cycle counter, try and use
1094         * the events counters
1095         */
1096        for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
1097                if (!test_and_set_bit(idx, cpuc->used_mask))
1098                        return idx;
1099        }
1100
1101        /* The counters are all in use. */
1102        return -EAGAIN;
1103}
1104
1105/*
1106 * Add an event filter to a given event. This will only work for PMUv2 PMUs.
1107 */
1108static int armv7pmu_set_event_filter(struct hw_perf_event *event,
1109                                     struct perf_event_attr *attr)
1110{
1111        unsigned long config_base = 0;
1112
1113        if (attr->exclude_idle)
1114                return -EPERM;
1115        if (attr->exclude_user)
1116                config_base |= ARMV7_EXCLUDE_USER;
1117        if (attr->exclude_kernel)
1118                config_base |= ARMV7_EXCLUDE_PL1;
1119        if (!attr->exclude_hv)
1120                config_base |= ARMV7_INCLUDE_HYP;
1121
1122        /*
1123         * Install the filter into config_base as this is used to
1124         * construct the event type.
1125         */
1126        event->config_base = config_base;
1127
1128        return 0;
1129}
1130
1131static void armv7pmu_reset(void *info)
1132{
1133        u32 idx, nb_cnt = cpu_pmu->num_events;
1134
1135        /* The counter and interrupt enable registers are unknown at reset. */
1136        for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx)
1137                armv7pmu_disable_event(NULL, idx);
1138
1139        /* Initialize & Reset PMNC: C and P bits */
1140        armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
1141}
1142
/* Map a generic perf event onto the Cortex-A8 event/cache tables. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return map_cpu_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
1148
/* Map a generic perf event onto the Cortex-A9 event/cache tables. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return map_cpu_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
1154
/* Map a generic perf event onto the Cortex-A5 event/cache tables. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return map_cpu_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
1160
/* Map a generic perf event onto the Cortex-A15 event/cache tables. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return map_cpu_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
1166
/*
 * Common ARMv7 PMU operations. The per-CPU init functions below fill
 * in id, name, map_event, num_events and (for PMUv2 parts)
 * set_event_filter.
 */
static struct arm_pmu armv7pmu = {
	.handle_irq		= armv7pmu_handle_irq,
	.enable			= armv7pmu_enable_event,
	.disable		= armv7pmu_disable_event,
	.read_counter		= armv7pmu_read_counter,
	.write_counter		= armv7pmu_write_counter,
	.get_event_idx		= armv7pmu_get_event_idx,
	.start			= armv7pmu_start,
	.stop			= armv7pmu_stop,
	.reset			= armv7pmu_reset,
	.max_period		= (1LLU << 32) - 1,	/* counters are 32 bits wide */
};
1179
1180static u32 __init armv7_read_num_pmnc_events(void)
1181{
1182        u32 nb_cnt;
1183
1184        /* Read the nb of CNTx counters supported from PMNC */
1185        nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;
1186
1187        /* Add the CPU cycles counter and return */
1188        return nb_cnt + 1;
1189}
1190
/* Finish initialising the shared armv7pmu descriptor for Cortex-A8. */
static struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA8;
	armv7pmu.name		= "ARMv7 Cortex-A8";
	armv7pmu.map_event	= armv7_a8_map_event;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1199
/* Finish initialising the shared armv7pmu descriptor for Cortex-A9. */
static struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA9;
	armv7pmu.name		= "ARMv7 Cortex-A9";
	armv7pmu.map_event	= armv7_a9_map_event;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1208
/* Finish initialising the shared armv7pmu descriptor for Cortex-A5. */
static struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA5;
	armv7pmu.name		= "ARMv7 Cortex-A5";
	armv7pmu.map_event	= armv7_a5_map_event;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	return &armv7pmu;
}
1217
/*
 * Finish initialising the shared armv7pmu descriptor for Cortex-A15.
 * A15 additionally supports privilege-level event filtering, so the
 * PMUv2 set_event_filter callback is installed here.
 */
static struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	armv7pmu.id		= ARM_PERF_PMU_ID_CA15;
	armv7pmu.name		= "ARMv7 Cortex-A15";
	armv7pmu.map_event	= armv7_a15_map_event;
	armv7pmu.num_events	= armv7_read_num_pmnc_events();
	armv7pmu.set_event_filter = armv7pmu_set_event_filter;
	return &armv7pmu;
}
1227#else
/*
 * Stubs for kernels built without CONFIG_CPU_V7: no ARMv7 PMU is
 * available, so every init function reports failure with NULL.
 */
static struct arm_pmu *__init armv7_a8_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a9_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a5_pmu_init(void)
{
	return NULL;
}

static struct arm_pmu *__init armv7_a15_pmu_init(void)
{
	return NULL;
}
1247#endif  /* CONFIG_CPU_V7 */
1248