0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
0017
0018
0019
0020 #ifdef CONFIG_CPU_V7
0021
0022 #include <asm/cp15.h>
0023 #include <asm/cputype.h>
0024 #include <asm/irq_regs.h>
0025 #include <asm/vfp.h>
0026 #include "../vfp/vfpinstr.h"
0027
0028 #include <linux/of.h>
0029 #include <linux/perf/arm_pmu.h>
0030 #include <linux/platform_device.h>
0031
0032
0033
0034
0035
0036
0037
0038
/*
 * Common ARMv7 event numbers (PMXEVTYPER.evtCount encodings).
 * These low event numbers are architecturally defined; an individual
 * implementation may not count all of them.
 */
#define ARMV7_PERFCTR_PMNC_SW_INCR 0x00
#define ARMV7_PERFCTR_L1_ICACHE_REFILL 0x01
#define ARMV7_PERFCTR_ITLB_REFILL 0x02
#define ARMV7_PERFCTR_L1_DCACHE_REFILL 0x03
#define ARMV7_PERFCTR_L1_DCACHE_ACCESS 0x04
#define ARMV7_PERFCTR_DTLB_REFILL 0x05
#define ARMV7_PERFCTR_MEM_READ 0x06
#define ARMV7_PERFCTR_MEM_WRITE 0x07
#define ARMV7_PERFCTR_INSTR_EXECUTED 0x08
#define ARMV7_PERFCTR_EXC_TAKEN 0x09
#define ARMV7_PERFCTR_EXC_EXECUTED 0x0A
#define ARMV7_PERFCTR_CID_WRITE 0x0B

/*
 * ARMV7_PERFCTR_PC_WRITE is the architectural "software change of PC"
 * event; it is used below as the generic branch-instructions event.
 */
#define ARMV7_PERFCTR_PC_WRITE 0x0C
#define ARMV7_PERFCTR_PC_IMM_BRANCH 0x0D
#define ARMV7_PERFCTR_PC_PROC_RETURN 0x0E
#define ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS 0x0F
#define ARMV7_PERFCTR_PC_BRANCH_MIS_PRED 0x10
#define ARMV7_PERFCTR_CLOCK_CYCLES 0x11
#define ARMV7_PERFCTR_PC_BRANCH_PRED 0x12

/* Events 0x13-0x1D were added in the ARMv7 PMUv2 revision. */
#define ARMV7_PERFCTR_MEM_ACCESS 0x13
#define ARMV7_PERFCTR_L1_ICACHE_ACCESS 0x14
#define ARMV7_PERFCTR_L1_DCACHE_WB 0x15
#define ARMV7_PERFCTR_L2_CACHE_ACCESS 0x16
#define ARMV7_PERFCTR_L2_CACHE_REFILL 0x17
#define ARMV7_PERFCTR_L2_CACHE_WB 0x18
#define ARMV7_PERFCTR_BUS_ACCESS 0x19
#define ARMV7_PERFCTR_MEM_ERROR 0x1A
#define ARMV7_PERFCTR_INSTR_SPEC 0x1B
#define ARMV7_PERFCTR_TTBR_WRITE 0x1C
#define ARMV7_PERFCTR_BUS_CYCLES 0x1D

/* Pseudo event number used by this driver to select the cycle counter. */
#define ARMV7_PERFCTR_CPU_CYCLES 0xFF
0081
0082
/* ARMv7 Cortex-A8 implementation-specific event numbers. */
#define ARMV7_A8_PERFCTR_L2_CACHE_ACCESS 0x43
#define ARMV7_A8_PERFCTR_L2_CACHE_REFILL 0x44
#define ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS 0x50
#define ARMV7_A8_PERFCTR_STALL_ISIDE 0x56

/* ARMv7 Cortex-A9 implementation-specific event numbers. */
#define ARMV7_A9_PERFCTR_INSTR_CORE_RENAME 0x68
#define ARMV7_A9_PERFCTR_STALL_ICACHE 0x60
#define ARMV7_A9_PERFCTR_STALL_DISPATCH 0x66

/* ARMv7 Cortex-A5 implementation-specific event numbers. */
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL 0xc2
#define ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP 0xc3

/* ARMv7 Cortex-A15 implementation-specific event numbers. */
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
#define ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ 0x42
#define ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE 0x43

#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ 0x4C
#define ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE 0x4D

#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ 0x50
#define ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ 0x52
#define ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE 0x53

#define ARMV7_A15_PERFCTR_PC_WRITE_SPEC 0x76

/* ARMv7 Cortex-A12 implementation-specific event numbers. */
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ 0x40
#define ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE 0x41

#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ 0x50
#define ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE 0x51

#define ARMV7_A12_PERFCTR_PC_WRITE_SPEC 0x76

#define ARMV7_A12_PERFCTR_PF_TLB_REFILL 0xe7
0123
0124
/*
 * ARMv7 Krait implementation-specific event numbers.  Krait events above
 * 0xffff select a region/group in one of the PMRESR registers; the low
 * byte is the group code (see the group0 base values below).
 */
#define KRAIT_PMRESR0_GROUP0 0xcc
#define KRAIT_PMRESR1_GROUP0 0xd0
#define KRAIT_PMRESR2_GROUP0 0xd4
#define KRAIT_VPMRESR0_GROUP0 0xd8

#define KRAIT_PERFCTR_L1_ICACHE_ACCESS 0x10011
#define KRAIT_PERFCTR_L1_ICACHE_MISS 0x10010

#define KRAIT_PERFCTR_L1_ITLB_ACCESS 0x12222
#define KRAIT_PERFCTR_L1_DTLB_ACCESS 0x12210

/* ARMv7 Scorpion implementation-specific event numbers (LPM registers). */
#define SCORPION_LPM0_GROUP0 0x4c
#define SCORPION_LPM1_GROUP0 0x50
#define SCORPION_LPM2_GROUP0 0x54
#define SCORPION_L2LPM_GROUP0 0x58
#define SCORPION_VLPM_GROUP0 0x5c

#define SCORPION_ICACHE_ACCESS 0x10053
#define SCORPION_ICACHE_MISS 0x10052

#define SCORPION_DTLB_ACCESS 0x12013
#define SCORPION_DTLB_MISS 0x12012

#define SCORPION_ITLB_MISS 0x12021
0150
0151
0152
0153
0154
0155
0156
0157
/*
 * Cortex-A8 mapping of the generic perf hardware events onto ARMv7/A8
 * event numbers.  Entries not listed here stay "unsupported" via
 * PERF_MAP_ALL_UNSUPPORTED.
 */
static const unsigned armv7_a8_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A8_PERFCTR_STALL_ISIDE,
};

/*
 * Cortex-A8 mapping of the generic perf cache events.  Indexed by
 * [cache-level][operation][result]; unlisted combinations are
 * unsupported.
 */
static const unsigned armv7_a8_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and
	 * write accesses/misses for L1D, so use the same event for both.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A8_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A8_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0203
0204
0205
0206
/*
 * Cortex-A9 mapping of the generic perf hardware events.  Instructions
 * are counted with the A9-specific "instructions through register
 * rename" event rather than the architectural INSTR_EXECUTED.
 */
static const unsigned armv7_a9_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_A9_PERFCTR_INSTR_CORE_RENAME,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV7_A9_PERFCTR_STALL_ICACHE,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV7_A9_PERFCTR_STALL_DISPATCH,
};

/* Cortex-A9 mapping of the generic perf cache events. */
static const unsigned armv7_a9_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and
	 * write accesses/misses for L1D, so use the same event for both.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0247
0248
0249
0250
/* Cortex-A5 mapping of the generic perf hardware events. */
static const unsigned armv7_a5_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};

/*
 * Cortex-A5 mapping of the generic perf cache events.  The A5 supports
 * prefetch events via its implementation-specific linefill counters.
 */
static const unsigned armv7_a5_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,
	/*
	 * The prefetch counters don't differentiate between the I side
	 * and the D side, so the same linefill events are reused here.
	 */
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A5_PERFCTR_PREFETCH_LINEFILL_DROP,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0293
0294
0295
0296
/* Cortex-A15 mapping of the generic perf hardware events. */
static const unsigned armv7_a15_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A15_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/*
 * Cortex-A15 mapping of the generic perf cache events.  The A15 has
 * separate read/write events for L1D and L2, which are used here
 * instead of the combined architectural events.
 */
static const unsigned armv7_a15_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_READ,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L1_DCACHE_REFILL_WRITE,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_READ,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A15_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_L2_CACHE_REFILL_WRITE,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_READ,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_A15_PERFCTR_DTLB_REFILL_L1_WRITE,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0342
0343
0344
0345
/* Cortex-A7 mapping of the generic perf hardware events. */
static const unsigned armv7_a7_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/*
 * Cortex-A7 mapping of the generic perf cache events; uses only the
 * architectural (PMUv2) event numbers.
 */
static const unsigned armv7_a7_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and
	 * write accesses/misses, so the same event is used for both.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L2_CACHE_ACCESS,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0391
0392
0393
0394
/* Cortex-A12 mapping of the generic perf hardware events. */
static const unsigned armv7_a12_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_A12_PERFCTR_PC_WRITE_SPEC,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_BUS_CYCLES,
};

/*
 * Cortex-A12 mapping of the generic perf cache events.  Read and write
 * accesses use A12-specific events; refills fall back to the
 * architectural (combined) events.
 */
static const unsigned armv7_a12_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_READ,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L1_DCACHE_ACCESS_WRITE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_ICACHE_REFILL,

	[C(LL)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_READ,
	[C(LL)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,
	[C(LL)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_A12_PERFCTR_L2_CACHE_ACCESS_WRITE,
	[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L2_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_DTLB_REFILL,
	[C(DTLB)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV7_A12_PERFCTR_PF_TLB_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_ITLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0441
0442
0443
0444
/* Krait mapping of the generic perf hardware events. */
static const unsigned krait_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/*
 * Same as krait_perf_map but without the branch-instructions event,
 * for Krait variants where PC_WRITE is not usable.
 */
static const unsigned krait_perf_map_no_branch[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/* Krait mapping of the generic perf cache events. */
static const unsigned krait_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and
	 * write accesses/misses for L1D, so use the same event for both.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = KRAIT_PERFCTR_L1_ICACHE_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_DTLB_ACCESS,

	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = KRAIT_PERFCTR_L1_ITLB_ACCESS,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0491
0492
0493
0494
/* Scorpion mapping of the generic perf hardware events. */
static const unsigned scorpion_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV7_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV7_PERFCTR_INSTR_EXECUTED,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV7_PERFCTR_PC_WRITE,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV7_PERFCTR_CLOCK_CYCLES,
};

/* Scorpion mapping of the generic perf cache events. */
static const unsigned scorpion_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					[PERF_COUNT_HW_CACHE_OP_MAX]
					[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	/*
	 * The performance counters don't differentiate between read and
	 * write accesses/misses for L1D/DTLB, so the same event is used
	 * for both operations.
	 */
	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_L1_DCACHE_ACCESS,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_L1_DCACHE_REFILL,
	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_ICACHE_ACCESS,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ICACHE_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = SCORPION_DTLB_ACCESS,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_DTLB_MISS,
	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
	[C(ITLB)][C(OP_WRITE)][C(RESULT_MISS)] = SCORPION_ITLB_MISS,
	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV7_PERFCTR_PC_BRANCH_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV7_PERFCTR_PC_BRANCH_MIS_PRED,
};
0534
/* sysfs "format" group: the raw event number lives in config bits 0-7. */
PMU_FORMAT_ATTR(event, "config:0-7");

static struct attribute *armv7_pmu_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

static struct attribute_group armv7_pmu_format_attr_group = {
	.name = "format",
	.attrs = armv7_pmu_format_attrs,
};

/*
 * Helper to declare a named sysfs event attribute whose string is
 * "event=<config>"; the extra macro level stringizes the macro name's
 * expansion (the numeric event value) rather than the name itself.
 */
#define ARMV7_EVENT_ATTR_RESOLVE(m) #m
#define ARMV7_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR_STRING(name, armv7_event_attr_##name, \
			      "event=" ARMV7_EVENT_ATTR_RESOLVE(config))
0551
/* Architected (PMUv1) events exported to sysfs. */
ARMV7_EVENT_ATTR(sw_incr, ARMV7_PERFCTR_PMNC_SW_INCR);
ARMV7_EVENT_ATTR(l1i_cache_refill, ARMV7_PERFCTR_L1_ICACHE_REFILL);
ARMV7_EVENT_ATTR(l1i_tlb_refill, ARMV7_PERFCTR_ITLB_REFILL);
ARMV7_EVENT_ATTR(l1d_cache_refill, ARMV7_PERFCTR_L1_DCACHE_REFILL);
ARMV7_EVENT_ATTR(l1d_cache, ARMV7_PERFCTR_L1_DCACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_tlb_refill, ARMV7_PERFCTR_DTLB_REFILL);
ARMV7_EVENT_ATTR(ld_retired, ARMV7_PERFCTR_MEM_READ);
ARMV7_EVENT_ATTR(st_retired, ARMV7_PERFCTR_MEM_WRITE);
ARMV7_EVENT_ATTR(inst_retired, ARMV7_PERFCTR_INSTR_EXECUTED);
ARMV7_EVENT_ATTR(exc_taken, ARMV7_PERFCTR_EXC_TAKEN);
ARMV7_EVENT_ATTR(exc_return, ARMV7_PERFCTR_EXC_EXECUTED);
ARMV7_EVENT_ATTR(cid_write_retired, ARMV7_PERFCTR_CID_WRITE);
ARMV7_EVENT_ATTR(pc_write_retired, ARMV7_PERFCTR_PC_WRITE);
ARMV7_EVENT_ATTR(br_immed_retired, ARMV7_PERFCTR_PC_IMM_BRANCH);
ARMV7_EVENT_ATTR(br_return_retired, ARMV7_PERFCTR_PC_PROC_RETURN);
ARMV7_EVENT_ATTR(unaligned_ldst_retired, ARMV7_PERFCTR_MEM_UNALIGNED_ACCESS);
ARMV7_EVENT_ATTR(br_mis_pred, ARMV7_PERFCTR_PC_BRANCH_MIS_PRED);
/* Event 0x11 is the architectural cycle event; exported as cpu_cycles. */
ARMV7_EVENT_ATTR(cpu_cycles, ARMV7_PERFCTR_CLOCK_CYCLES);
ARMV7_EVENT_ATTR(br_pred, ARMV7_PERFCTR_PC_BRANCH_PRED);

static struct attribute *armv7_pmuv1_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv1_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv1_event_attrs,
};
0599
/* Additional events introduced by PMUv2, also exported to sysfs. */
ARMV7_EVENT_ATTR(mem_access, ARMV7_PERFCTR_MEM_ACCESS);
ARMV7_EVENT_ATTR(l1i_cache, ARMV7_PERFCTR_L1_ICACHE_ACCESS);
ARMV7_EVENT_ATTR(l1d_cache_wb, ARMV7_PERFCTR_L1_DCACHE_WB);
ARMV7_EVENT_ATTR(l2d_cache, ARMV7_PERFCTR_L2_CACHE_ACCESS);
ARMV7_EVENT_ATTR(l2d_cache_refill, ARMV7_PERFCTR_L2_CACHE_REFILL);
ARMV7_EVENT_ATTR(l2d_cache_wb, ARMV7_PERFCTR_L2_CACHE_WB);
ARMV7_EVENT_ATTR(bus_access, ARMV7_PERFCTR_BUS_ACCESS);
ARMV7_EVENT_ATTR(memory_error, ARMV7_PERFCTR_MEM_ERROR);
ARMV7_EVENT_ATTR(inst_spec, ARMV7_PERFCTR_INSTR_SPEC);
ARMV7_EVENT_ATTR(ttbr_write_retired, ARMV7_PERFCTR_TTBR_WRITE);
ARMV7_EVENT_ATTR(bus_cycles, ARMV7_PERFCTR_BUS_CYCLES);

/* PMUv2 sysfs "events" group: all PMUv1 events plus the PMUv2 ones. */
static struct attribute *armv7_pmuv2_event_attrs[] = {
	&armv7_event_attr_sw_incr.attr.attr,
	&armv7_event_attr_l1i_cache_refill.attr.attr,
	&armv7_event_attr_l1i_tlb_refill.attr.attr,
	&armv7_event_attr_l1d_cache_refill.attr.attr,
	&armv7_event_attr_l1d_cache.attr.attr,
	&armv7_event_attr_l1d_tlb_refill.attr.attr,
	&armv7_event_attr_ld_retired.attr.attr,
	&armv7_event_attr_st_retired.attr.attr,
	&armv7_event_attr_inst_retired.attr.attr,
	&armv7_event_attr_exc_taken.attr.attr,
	&armv7_event_attr_exc_return.attr.attr,
	&armv7_event_attr_cid_write_retired.attr.attr,
	&armv7_event_attr_pc_write_retired.attr.attr,
	&armv7_event_attr_br_immed_retired.attr.attr,
	&armv7_event_attr_br_return_retired.attr.attr,
	&armv7_event_attr_unaligned_ldst_retired.attr.attr,
	&armv7_event_attr_br_mis_pred.attr.attr,
	&armv7_event_attr_cpu_cycles.attr.attr,
	&armv7_event_attr_br_pred.attr.attr,
	&armv7_event_attr_mem_access.attr.attr,
	&armv7_event_attr_l1i_cache.attr.attr,
	&armv7_event_attr_l1d_cache_wb.attr.attr,
	&armv7_event_attr_l2d_cache.attr.attr,
	&armv7_event_attr_l2d_cache_refill.attr.attr,
	&armv7_event_attr_l2d_cache_wb.attr.attr,
	&armv7_event_attr_bus_access.attr.attr,
	&armv7_event_attr_memory_error.attr.attr,
	&armv7_event_attr_inst_spec.attr.attr,
	&armv7_event_attr_ttbr_write_retired.attr.attr,
	&armv7_event_attr_bus_cycles.attr.attr,
	NULL,
};

static struct attribute_group armv7_pmuv2_events_attr_group = {
	.name = "events",
	.attrs = armv7_pmuv2_event_attrs,
};
0650
0651
0652
0653
/*
 * Perf event index layout: index 0 is the dedicated cycle counter,
 * indices 1..num_events-1 are the programmable event counters.
 */
#define ARMV7_IDX_CYCLE_COUNTER 0
#define ARMV7_IDX_COUNTER0 1
#define ARMV7_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV7_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)

#define ARMV7_MAX_COUNTERS 32
#define ARMV7_COUNTER_MASK (ARMV7_MAX_COUNTERS - 1)

/*
 * Convert a perf event index into the hardware counter number written
 * to PMSELR (event counter 0 = perf index ARMV7_IDX_COUNTER0).
 */
#define ARMV7_IDX_TO_COUNTER(x) \
	(((x) - ARMV7_IDX_COUNTER0) & ARMV7_COUNTER_MASK)

/*
 * Per-CPU PMNC (PMCR): config register bits.
 */
#define ARMV7_PMNC_E (1 << 0) /* Enable all counters */
#define ARMV7_PMNC_P (1 << 1) /* Reset all counters */
#define ARMV7_PMNC_C (1 << 2) /* Cycle counter reset */
#define ARMV7_PMNC_D (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV7_PMNC_X (1 << 4) /* Export to ETM */
#define ARMV7_PMNC_DP (1 << 5) /* Disable CCNT if non-invasive debug */
#define ARMV7_PMNC_N_SHIFT 11 /* Number of counters supported */
#define ARMV7_PMNC_N_MASK 0x1f
#define ARMV7_PMNC_MASK 0x3f /* Mask for writable bits */

/*
 * FLAG: counters overflow flag status register.
 */
#define ARMV7_FLAG_MASK 0xffffffff /* Mask for writable bits */
#define ARMV7_OVERFLOWED_MASK ARMV7_FLAG_MASK

/*
 * PMXEVTYPER: event selection register.
 */
#define ARMV7_EVTYPE_MASK 0xc80000ff /* Mask for writable bits */
#define ARMV7_EVTYPE_EVENT 0xff /* Mask for EVENT bits */

/*
 * Event filters for PMUv2 (PMXEVTYPER privilege-level filter bits).
 */
#define ARMV7_EXCLUDE_PL1 BIT(31)
#define ARMV7_EXCLUDE_USER BIT(30)
#define ARMV7_INCLUDE_HYP BIT(27)

/*
 * Secure debug enable register (SDER) bit.
 */
#define ARMV7_SDER_SUNIDEN BIT(1) /* Permit non-invasive debug */
0708
/* Read the PMNC (PMCR) control register. */
static inline u32 armv7_pmnc_read(void)
{
	u32 val;
	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r"(val));
	return val;
}
0715
/*
 * Write the PMNC (PMCR) control register.  Only the architecturally
 * writable bits are kept.  The isb() before the write orders any prior
 * PMU register programming ahead of the control-register update (e.g.
 * so counters are fully configured before being globally enabled).
 */
static inline void armv7_pmnc_write(u32 val)
{
	val &= ARMV7_PMNC_MASK;
	isb();
	asm volatile("mcr p15, 0, %0, c9, c12, 0" : : "r"(val));
}
0722
/*
 * Test whether any counter overflow flag is set in the overflow status
 * word.  Note: returns the raw masked bits (non-zero == overflowed),
 * not a normalised 0/1.
 */
static inline int armv7_pmnc_has_overflowed(u32 pmnc)
{
	return pmnc & ARMV7_OVERFLOWED_MASK;
}
0727
0728 static inline int armv7_pmnc_counter_valid(struct arm_pmu *cpu_pmu, int idx)
0729 {
0730 return idx >= ARMV7_IDX_CYCLE_COUNTER &&
0731 idx <= ARMV7_IDX_COUNTER_LAST(cpu_pmu);
0732 }
0733
0734 static inline int armv7_pmnc_counter_has_overflowed(u32 pmnc, int idx)
0735 {
0736 return pmnc & BIT(ARMV7_IDX_TO_COUNTER(idx));
0737 }
0738
/*
 * Select an event counter via PMSELR so that subsequent PMXEVTYPER /
 * PMXEVCNTR accesses target it.  The isb() ensures the selection has
 * taken effect before the caller touches the banked registers.
 */
static inline void armv7_pmnc_select_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 5" : : "r" (counter));
	isb();
}
0745
/*
 * Read the current value of the event's hardware counter.  The cycle
 * counter is read directly from PMCCNTR; event counters are read via
 * PMSELR + PMXEVCNTR.  An invalid index logs an error and returns 0.
 */
static inline u64 armv7pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		/* PMCCNTR: dedicated cycle counter */
		asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (value));
	} else {
		/* PMXEVCNTR for the counter selected via PMSELR */
		armv7_pmnc_select_counter(idx);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (value));
	}

	return value;
}
0765
/*
 * Write a new value into the event's hardware counter.  The ARMv7
 * counters are 32 bits wide, so only the low word of the 64-bit perf
 * value is written.  An invalid index logs an error and does nothing.
 */
static inline void armv7pmu_write_counter(struct perf_event *event, u64 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	} else if (idx == ARMV7_IDX_CYCLE_COUNTER) {
		/* PMCCNTR: dedicated cycle counter */
		asm volatile("mcr p15, 0, %0, c9, c13, 0" : : "r" ((u32)value));
	} else {
		/* PMXEVCNTR for the counter selected via PMSELR */
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c13, 2" : : "r" ((u32)value));
	}
}
0782
/*
 * Program the event type (and PMUv2 filter bits) for a counter by
 * writing PMXEVTYPER after selecting the counter, masking off the
 * non-writable bits.
 */
static inline void armv7_pmnc_write_evtsel(int idx, u32 val)
{
	armv7_pmnc_select_counter(idx);
	val &= ARMV7_EVTYPE_MASK;
	asm volatile("mcr p15, 0, %0, c9, c13, 1" : : "r" (val));
}
0789
/* Enable one counter by setting its bit in PMCNTENSET. */
static inline void armv7_pmnc_enable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 1" : : "r" (BIT(counter)));
}
0795
/* Disable one counter by setting its bit in PMCNTENCLR. */
static inline void armv7_pmnc_disable_counter(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c12, 2" : : "r" (BIT(counter)));
}
0801
/* Enable the overflow interrupt for one counter via PMINTENSET. */
static inline void armv7_pmnc_enable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 1" : : "r" (BIT(counter)));
}
0807
/*
 * Disable the overflow interrupt for one counter via PMINTENCLR, then
 * clear its overflow flag in PMOVSR in case an interrupt was already
 * pending.  The isb()s ensure each write has taken effect before the
 * next step.
 */
static inline void armv7_pmnc_disable_intens(int idx)
{
	u32 counter = ARMV7_IDX_TO_COUNTER(idx);
	asm volatile("mcr p15, 0, %0, c9, c14, 2" : : "r" (BIT(counter)));
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (BIT(counter)));
	isb();
}
0817
/*
 * Read and clear the counter overflow flags.  PMOVSR is write-one-to-
 * clear, so writing back the value just read resets exactly the flags
 * that were observed.  Returns the flags that were set.
 */
static inline u32 armv7_pmnc_getreset_flags(void)
{
	u32 val;

	/* Read the overflow flag status register (PMOVSR). */
	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));

	/* Write the observed flags back to clear them. */
	val &= ARMV7_FLAG_MASK;
	asm volatile("mcr p15, 0, %0, c9, c12, 3" : : "r" (val));

	return val;
}
0831
0832 #ifdef DEBUG
/*
 * Dump all PMU registers to the kernel log: the control, enable,
 * interrupt-enable, flag, select and cycle-count registers, followed
 * by each event counter's value and event selection.  Debug aid only
 * (compiled under #ifdef DEBUG).
 */
static void armv7_pmnc_dump_regs(struct arm_pmu *cpu_pmu)
{
	u32 val;
	unsigned int cnt;

	pr_info("PMNC registers dump:\n");

	asm volatile("mrc p15, 0, %0, c9, c12, 0" : "=r" (val));
	pr_info("PMNC  =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 1" : "=r" (val));
	pr_info("CNTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c14, 1" : "=r" (val));
	pr_info("INTENS=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 3" : "=r" (val));
	pr_info("FLAGS =0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c12, 5" : "=r" (val));
	pr_info("SELECT=0x%08x\n", val);

	asm volatile("mrc p15, 0, %0, c9, c13, 0" : "=r" (val));
	pr_info("CCNT  =0x%08x\n", val);

	for (cnt = ARMV7_IDX_COUNTER0;
			cnt <= ARMV7_IDX_COUNTER_LAST(cpu_pmu); cnt++) {
		armv7_pmnc_select_counter(cnt);
		asm volatile("mrc p15, 0, %0, c9, c13, 2" : "=r" (val));
		pr_info("CNT[%d] count =0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
		asm volatile("mrc p15, 0, %0, c9, c13, 1" : "=r" (val));
		pr_info("CNT[%d] evtsel=0x%08x\n",
			ARMV7_IDX_TO_COUNTER(cnt), val);
	}
}
0869 #endif
0870
static void armv7pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u enabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We only need to set the event for the cycle counter if we
	 * have the ability to perform event filtering.
	 */
	if (cpu_pmu->set_event_filter || idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv7_pmnc_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
0916
static void armv7pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	if (!armv7_pmnc_counter_valid(cpu_pmu, idx)) {
		pr_err("CPU%u disabling wrong PMNC counter IRQ enable %d\n",
			smp_processor_id(), idx);
		return;
	}

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
0948
/*
 * PMU overflow interrupt handler: read-and-clear the overflow flags,
 * then run the perf overflow machinery for each counter that fired.
 */
static irqreturn_t armv7pmu_handle_irq(struct arm_pmu *cpu_pmu)
{
	u32 pmnc;
	struct perf_sample_data data;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmnc = armv7_pmnc_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv7_pmnc_has_overflowed(pmnc))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv7_pmnc_counter_has_overflowed(pmnc, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}
1009
static void armv7pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters by setting the E bit in PMNC */
	armv7_pmnc_write(armv7_pmnc_read() | ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1020
static void armv7pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters by clearing the E bit in PMNC */
	armv7_pmnc_write(armv7_pmnc_read() & ~ARMV7_PMNC_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1031
/*
 * Allocate a hardware counter index for @event: the dedicated cycle
 * counter for CPU_CYCLES, otherwise the first free programmable counter.
 * Returns the index, or -EAGAIN if all suitable counters are in use.
 */
static int armv7pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV7_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV7_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV7_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV7_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the events counters
	 */
	for (idx = ARMV7_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}
1060
1061 static void armv7pmu_clear_event_idx(struct pmu_hw_events *cpuc,
1062 struct perf_event *event)
1063 {
1064 clear_bit(event->hw.idx, cpuc->used_mask);
1065 }
1066
1067
1068
1069
/*
 * Add an event filter to a given event.
 * Translates the perf exclude_* attributes into the ARMv7 EVTYPER
 * mode-exclusion bits. exclude_idle cannot be honoured in hardware.
 */
static int armv7pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;
	if (attr->exclude_user)
		config_base |= ARMV7_EXCLUDE_USER;
	if (attr->exclude_kernel)
		config_base |= ARMV7_EXCLUDE_PL1;
	if (!attr->exclude_hv)
		config_base |= ARMV7_INCLUDE_HYP;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
1092
/* Put the PMU into a sane initial state on this CPU (runs via IPI). */
static void armv7pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events, val;

	if (cpu_pmu->secure_access) {
		/* Allow non-invasive debug (counting) in Secure state (SDER) */
		asm volatile("mrc p15, 0, %0, c1, c1, 1" : "=r" (val));
		val |= ARMV7_SDER_SUNIDEN;
		asm volatile("mcr p15, 0, %0, c1, c1, 1" : : "r" (val));
	}

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_disable_counter(idx);
		armv7_pmnc_disable_intens(idx);
	}

	/* Initialize & Reset PMNC: C and P bits */
	armv7_pmnc_write(ARMV7_PMNC_P | ARMV7_PMNC_C);
}
1113
/* Map generic perf event codes to Cortex-A8 hardware event numbers. */
static int armv7_a8_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a8_perf_map,
				&armv7_a8_perf_cache_map, 0xFF);
}
1119
/* Map generic perf event codes to Cortex-A9 hardware event numbers. */
static int armv7_a9_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a9_perf_map,
				&armv7_a9_perf_cache_map, 0xFF);
}
1125
/* Map generic perf event codes to Cortex-A5 hardware event numbers. */
static int armv7_a5_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a5_perf_map,
				&armv7_a5_perf_cache_map, 0xFF);
}
1131
/* Map generic perf event codes to Cortex-A15 hardware event numbers. */
static int armv7_a15_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a15_perf_map,
				&armv7_a15_perf_cache_map, 0xFF);
}
1137
/* Map generic perf event codes to Cortex-A7 hardware event numbers. */
static int armv7_a7_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a7_perf_map,
				&armv7_a7_perf_cache_map, 0xFF);
}
1143
/* Map generic perf event codes to Cortex-A12/A17 hardware event numbers. */
static int armv7_a12_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_a12_perf_map,
				&armv7_a12_perf_cache_map, 0xFF);
}
1149
/* Map generic perf event codes to Krait events (0xFFFFF mask covers the
 * extended region/group/code encoding in config_base). */
static int krait_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map,
				&krait_perf_cache_map, 0xFFFFF);
}
1155
/* As krait_map_event(), but using the map for parts without the
 * PC-write (branch) event ("qcom,no-pc-write"). */
static int krait_map_event_no_branch(struct perf_event *event)
{
	return armpmu_map_event(event, &krait_perf_map_no_branch,
				&krait_perf_cache_map, 0xFFFFF);
}
1161
/* Map generic perf event codes to Scorpion events (extended encoding). */
static int scorpion_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &scorpion_perf_map,
				&scorpion_perf_cache_map, 0xFFFFF);
}
1167
1168 static void armv7pmu_init(struct arm_pmu *cpu_pmu)
1169 {
1170 cpu_pmu->handle_irq = armv7pmu_handle_irq;
1171 cpu_pmu->enable = armv7pmu_enable_event;
1172 cpu_pmu->disable = armv7pmu_disable_event;
1173 cpu_pmu->read_counter = armv7pmu_read_counter;
1174 cpu_pmu->write_counter = armv7pmu_write_counter;
1175 cpu_pmu->get_event_idx = armv7pmu_get_event_idx;
1176 cpu_pmu->clear_event_idx = armv7pmu_clear_event_idx;
1177 cpu_pmu->start = armv7pmu_start;
1178 cpu_pmu->stop = armv7pmu_stop;
1179 cpu_pmu->reset = armv7pmu_reset;
1180 };
1181
/* Runs on the target CPU: report the number of usable event counters. */
static void armv7_read_num_pmnc_events(void *info)
{
	int *nb_cnt = info;

	/* Read the nb of CNTx counters supported from PMNC */
	*nb_cnt = (armv7_pmnc_read() >> ARMV7_PMNC_N_SHIFT) & ARMV7_PMNC_N_MASK;

	/* Add the CPU cycles counter */
	*nb_cnt += 1;
}
1192
/* Probe num_events by reading PMNC on any CPU this PMU supports. */
static int armv7_probe_num_events(struct arm_pmu *arm_pmu)
{
	return smp_call_function_any(&arm_pmu->supported_cpus,
				     armv7_read_num_pmnc_events,
				     &arm_pmu->num_events, 1);
}
1199
/* Cortex-A8 (PMUv1) initialisation. */
static int armv7_a8_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a8";
	cpu_pmu->map_event	= armv7_a8_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1211
/* Cortex-A9 (PMUv1) initialisation. */
static int armv7_a9_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a9";
	cpu_pmu->map_event	= armv7_a9_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1223
/* Cortex-A5 (PMUv1) initialisation. */
static int armv7_a5_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a5";
	cpu_pmu->map_event	= armv7_a5_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv1_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1235
/* Cortex-A15 (PMUv2) initialisation — supports mode-exclusion filtering. */
static int armv7_a15_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a15";
	cpu_pmu->map_event	= armv7_a15_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1248
/* Cortex-A7 (PMUv2) initialisation — supports mode-exclusion filtering. */
static int armv7_a7_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a7";
	cpu_pmu->map_event	= armv7_a7_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1261
/* Cortex-A12 (PMUv2) initialisation — supports mode-exclusion filtering. */
static int armv7_a12_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_cortex_a12";
	cpu_pmu->map_event	= armv7_a12_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return armv7_probe_num_events(cpu_pmu);
}
1274
/* Cortex-A17: identical to the A12 PMU apart from the reported name. */
static int armv7_a17_pmu_init(struct arm_pmu *cpu_pmu)
{
	int ret = armv7_a12_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv7_cortex_a17";
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv7_pmuv2_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv7_pmu_format_attr_group;
	return ret;
}
1285
1286
1287
1288
1289
1290
1291
1292
1293
1294
1295
1296
1297
1298
1299
1300
1301
1302
1303
1304
1305
1306
1307
1308
1309
1310
1311
1312
1313
1314
1315
1316
1317
1318
/*
 * Krait/Scorpion extended events carry a tag in bits [17:16] of
 * config_base to mark CPU-specific (KRAIT_EVENT) or VeNum/VFP
 * (VENUM_EVENT) region/group encoded events; the low bits encode
 * region, code and group used to program the PMRESRn registers.
 */
#define KRAIT_EVENT (1 << 16)
#define VENUM_EVENT (2 << 16)
#define KRAIT_EVENT_MASK (KRAIT_EVENT | VENUM_EVENT)
#define PMRESRn_EN BIT(31)	/* event-enable bit of a PMRESR register */

#define EVENT_REGION(event) (((event) >> 12) & 0xf)	/* PMRESRn register */
#define EVENT_GROUP(event) ((event) & 0xf)		/* byte lane in PMRESRn */
#define EVENT_CODE(event) (((event) >> 4) & 0xff)	/* event code */
#define EVENT_VENUM(event) (!!(event & VENUM_EVENT))	/* VeNum event? */
#define EVENT_CPU(event) (!!(event & KRAIT_EVENT))	/* CPU-specific event? */
1329
/* Read Krait PMRESRn (n = 0..2); any other n is a programming error. */
static u32 krait_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 1, %0, c9, c15, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c9, c15, 1" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 1, %0, c9, c15, 2" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}

	return val;
}
1350
/* Write Krait PMRESRn (n = 0..2); any other n is a programming error. */
static void krait_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 1, %0, c9, c15, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c9, c15, 1" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 1, %0, c9, c15, 2" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in krait_pmu_get_event_idx() */
	}
}
1367
/* Read the VeNum PMRESR via coprocessor 10 (VFP access must be enabled
 * first — see venum_pre_pmresr()). */
static u32 venum_read_pmresr(void)
{
	u32 val;
	asm volatile("mrc p10, 7, %0, c11, c0, 0" : "=r" (val));
	return val;
}
1374
/* Write the VeNum PMRESR via coprocessor 10 (VFP access must be enabled
 * first — see venum_pre_pmresr()). */
static void venum_write_pmresr(u32 val)
{
	asm volatile("mcr p10, 7, %0, c11, c0, 0" : : "r" (val));
}
1379
/*
 * Grant access to CP10/CP11 and enable the FPU so the VeNum PMRESR can
 * be accessed. Saves the previous CPACR and FPEXC values into the
 * caller-supplied slots for venum_post_pmresr() to restore.
 * Must run with preemption disabled (per-CPU coprocessor state).
 */
static void venum_pre_pmresr(u32 *venum_orig_val, u32 *fp_orig_val)
{
	u32 venum_new_val;
	u32 fp_new_val;

	BUG_ON(preemptible());
	/* CPACR Enable CP10 and CP11 access */
	*venum_orig_val = get_copro_access();
	venum_new_val = *venum_orig_val | CPACC_SVC(10) | CPACC_SVC(11);
	set_copro_access(venum_new_val);

	/* Enable FPEXC */
	*fp_orig_val = fmrx(FPEXC);
	fp_new_val = *fp_orig_val | FPEXC_EN;
	fmxr(FPEXC, fp_new_val);
}
1396
/* Restore FPEXC and CPACR saved by venum_pre_pmresr(). */
static void venum_post_pmresr(u32 venum_orig_val, u32 fp_orig_val)
{
	BUG_ON(preemptible());
	/* Restore FPEXC */
	fmxr(FPEXC, fp_orig_val);
	isb();
	/* Restore CPACR */
	set_copro_access(venum_orig_val);
}
1406
1407 static u32 krait_get_pmresrn_event(unsigned int region)
1408 {
1409 static const u32 pmresrn_table[] = { KRAIT_PMRESR0_GROUP0,
1410 KRAIT_PMRESR1_GROUP0,
1411 KRAIT_PMRESR2_GROUP0 };
1412 return pmresrn_table[region];
1413 }
1414
1415 static void krait_evt_setup(int idx, u32 config_base)
1416 {
1417 u32 val;
1418 u32 mask;
1419 u32 vval, fval;
1420 unsigned int region = EVENT_REGION(config_base);
1421 unsigned int group = EVENT_GROUP(config_base);
1422 unsigned int code = EVENT_CODE(config_base);
1423 unsigned int group_shift;
1424 bool venum_event = EVENT_VENUM(config_base);
1425
1426 group_shift = group * 8;
1427 mask = 0xff << group_shift;
1428
1429
1430 if (venum_event)
1431 val = KRAIT_VPMRESR0_GROUP0;
1432 else
1433 val = krait_get_pmresrn_event(region);
1434 val += group;
1435
1436 val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
1437 armv7_pmnc_write_evtsel(idx, val);
1438
1439 if (venum_event) {
1440 venum_pre_pmresr(&vval, &fval);
1441 val = venum_read_pmresr();
1442 val &= ~mask;
1443 val |= code << group_shift;
1444 val |= PMRESRn_EN;
1445 venum_write_pmresr(val);
1446 venum_post_pmresr(vval, fval);
1447 } else {
1448 val = krait_read_pmresrn(region);
1449 val &= ~mask;
1450 val |= code << group_shift;
1451 val |= PMRESRn_EN;
1452 krait_write_pmresrn(region, val);
1453 }
1454 }
1455
1456 static u32 clear_pmresrn_group(u32 val, int group)
1457 {
1458 u32 mask;
1459 int group_shift;
1460
1461 group_shift = group * 8;
1462 mask = 0xff << group_shift;
1463 val &= ~mask;
1464
1465
1466 if (val & ~PMRESRn_EN)
1467 return val |= PMRESRn_EN;
1468
1469 return 0;
1470 }
1471
/* Remove the event described by @config_base from its PMRESR group,
 * dropping the register's enable bit if no groups remain in use. */
static void krait_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = krait_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		krait_write_pmresrn(region, val);
	}
}
1492
static void krait_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1518
static void krait_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We set the event for the cycle counter because we
	 * have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		krait_evt_setup(idx, hwc->config_base);
	else
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1554
/* Krait reset: common ARMv7 reset plus clearing the region/VeNum
 * PMRESR registers and the per-counter PMxEVCNTCR controls. */
static void krait_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	krait_write_pmresrn(0, 0);
	krait_write_pmresrn(1, 0);
	krait_write_pmresrn(2, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}

}
1579
/*
 * Map a (region, group) PMRESR slot to a bit in cpuc->used_mask so
 * that two events can never claim the same group of the same PMRESR
 * register at once.
 */
static int krait_event_to_bit(struct perf_event *event, unsigned int region,
			      unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = KRAIT_VPMRESR0_GROUP0;
	else
		bit = krait_get_pmresrn_event(region);
	bit -= krait_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1601
1602
1603
1604
1605
/*
 * We check for column exclusion constraints here: two events can't use
 * the same group within a PMRESR register. Region/group claims are
 * tracked in the high bits of cpuc->used_mask.
 */
static int krait_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				   struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int code = EVENT_CODE(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	if (venum_event || krait_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 2)
			return -EINVAL;
		if (venum_event && (code & 0xe0))
			return -EINVAL;

		bit = krait_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	/* Release the PMRESR claim if no counter could be allocated. */
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1636
/* Release both the counter index and, for region/VeNum events, the
 * PMRESR group claim taken in krait_pmu_get_event_idx(). */
static void krait_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool krait_event = EVENT_CPU(hwc->config_base);

	armv7pmu_clear_event_idx(cpuc, event);
	if (venum_event || krait_event) {
		bit = krait_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1653
/* Qualcomm Krait initialisation: common ARMv7 hooks overridden with the
 * PMRESR-aware enable/disable/index-allocation callbacks. */
static int krait_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_krait";
	/* Some early Krait parts lack the PC-write (branch) event. */
	if (of_property_read_bool(cpu_pmu->plat_device->dev.of_node,
				  "qcom,no-pc-write"))
		cpu_pmu->map_event = krait_map_event_no_branch;
	else
		cpu_pmu->map_event = krait_map_event;
	cpu_pmu->set_event_filter = armv7pmu_set_event_filter;
	cpu_pmu->reset		= krait_pmu_reset;
	cpu_pmu->enable		= krait_pmu_enable_event;
	cpu_pmu->disable	= krait_pmu_disable_event;
	cpu_pmu->get_event_idx	= krait_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = krait_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
1682
1683
1684
1685
1686
1687
1688
1689
1690
1691
1692
1693
1694
1695
1696
1697
1698
1699
1700
1701
1702
1703
1704
1705
1706
1707
1708
/* Read Scorpion LPMn/L2LPM (n = 0..3); any other n is a programming error. */
static u32 scorpion_read_pmresrn(int n)
{
	u32 val;

	switch (n) {
	case 0:
		asm volatile("mrc p15, 0, %0, c15, c0, 0" : "=r" (val));
		break;
	case 1:
		asm volatile("mrc p15, 1, %0, c15, c0, 0" : "=r" (val));
		break;
	case 2:
		asm volatile("mrc p15, 2, %0, c15, c0, 0" : "=r" (val));
		break;
	case 3:
		asm volatile("mrc p15, 3, %0, c15, c2, 0" : "=r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}

	return val;
}
1732
/* Write Scorpion LPMn/L2LPM (n = 0..3); any other n is a programming error. */
static void scorpion_write_pmresrn(int n, u32 val)
{
	switch (n) {
	case 0:
		asm volatile("mcr p15, 0, %0, c15, c0, 0" : : "r" (val));
		break;
	case 1:
		asm volatile("mcr p15, 1, %0, c15, c0, 0" : : "r" (val));
		break;
	case 2:
		asm volatile("mcr p15, 2, %0, c15, c0, 0" : : "r" (val));
		break;
	case 3:
		asm volatile("mcr p15, 3, %0, c15, c2, 0" : : "r" (val));
		break;
	default:
		BUG(); /* Should be validated in scorpion_pmu_get_event_idx() */
	}
}
1752
1753 static u32 scorpion_get_pmresrn_event(unsigned int region)
1754 {
1755 static const u32 pmresrn_table[] = { SCORPION_LPM0_GROUP0,
1756 SCORPION_LPM1_GROUP0,
1757 SCORPION_LPM2_GROUP0,
1758 SCORPION_L2LPM_GROUP0 };
1759 return pmresrn_table[region];
1760 }
1761
1762 static void scorpion_evt_setup(int idx, u32 config_base)
1763 {
1764 u32 val;
1765 u32 mask;
1766 u32 vval, fval;
1767 unsigned int region = EVENT_REGION(config_base);
1768 unsigned int group = EVENT_GROUP(config_base);
1769 unsigned int code = EVENT_CODE(config_base);
1770 unsigned int group_shift;
1771 bool venum_event = EVENT_VENUM(config_base);
1772
1773 group_shift = group * 8;
1774 mask = 0xff << group_shift;
1775
1776
1777 if (venum_event)
1778 val = SCORPION_VLPM_GROUP0;
1779 else
1780 val = scorpion_get_pmresrn_event(region);
1781 val += group;
1782
1783 val |= config_base & (ARMV7_EXCLUDE_USER | ARMV7_EXCLUDE_PL1);
1784 armv7_pmnc_write_evtsel(idx, val);
1785
1786 asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
1787
1788 if (venum_event) {
1789 venum_pre_pmresr(&vval, &fval);
1790 val = venum_read_pmresr();
1791 val &= ~mask;
1792 val |= code << group_shift;
1793 val |= PMRESRn_EN;
1794 venum_write_pmresr(val);
1795 venum_post_pmresr(vval, fval);
1796 } else {
1797 val = scorpion_read_pmresrn(region);
1798 val &= ~mask;
1799 val |= code << group_shift;
1800 val |= PMRESRn_EN;
1801 scorpion_write_pmresrn(region, val);
1802 }
1803 }
1804
/* Remove the event described by @config_base from its LPM group,
 * dropping the register's enable bit if no groups remain in use. */
static void scorpion_clearpmu(u32 config_base)
{
	u32 val;
	u32 vval, fval;
	unsigned int region = EVENT_REGION(config_base);
	unsigned int group = EVENT_GROUP(config_base);
	bool venum_event = EVENT_VENUM(config_base);

	if (venum_event) {
		venum_pre_pmresr(&vval, &fval);
		val = venum_read_pmresr();
		val = clear_pmresrn_group(val, group);
		venum_write_pmresr(val);
		venum_post_pmresr(vval, fval);
	} else {
		val = scorpion_read_pmresrn(region);
		val = clear_pmresrn_group(val, group);
		scorpion_write_pmresrn(region, val);
	}
}
1825
static void scorpion_pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/* Disable counter and interrupt */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Clear pmresr code (if destined for PMNx counters)
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_clearpmu(hwc->config_base);

	/* Disable interrupt for this counter */
	armv7_pmnc_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1851
static void scorpion_pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/* Disable counter */
	armv7_pmnc_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters)
	 * We don't set the event for the cycle counter because we
	 * don't have the ability to perform event filtering.
	 */
	if (hwc->config_base & KRAIT_EVENT_MASK)
		scorpion_evt_setup(idx, hwc->config_base);
	else if (idx != ARMV7_IDX_CYCLE_COUNTER)
		armv7_pmnc_write_evtsel(idx, hwc->config_base);

	/* Enable interrupt for this counter */
	armv7_pmnc_enable_intens(idx);

	/* Enable counter */
	armv7_pmnc_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}
1887
/* Scorpion reset: common ARMv7 reset plus clearing the LPM/VeNum
 * registers and the per-counter PMxEVCNTCR controls. */
static void scorpion_pmu_reset(void *info)
{
	u32 vval, fval;
	struct arm_pmu *cpu_pmu = info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	armv7pmu_reset(info);

	/* Clear all pmresrs */
	scorpion_write_pmresrn(0, 0);
	scorpion_write_pmresrn(1, 0);
	scorpion_write_pmresrn(2, 0);
	scorpion_write_pmresrn(3, 0);

	venum_pre_pmresr(&vval, &fval);
	venum_write_pmresr(0);
	venum_post_pmresr(vval, fval);

	/* Reset PMxEVNCTCR to sane default */
	for (idx = ARMV7_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv7_pmnc_select_counter(idx);
		asm volatile("mcr p15, 0, %0, c9, c15, 0" : : "r" (0));
	}
}
1912
/*
 * Map a (region, group) LPM slot to a bit in cpuc->used_mask so that
 * two events can never claim the same group of the same LPM register
 * at once.
 */
static int scorpion_event_to_bit(struct perf_event *event, unsigned int region,
				 unsigned int group)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);

	if (hwc->config_base & VENUM_EVENT)
		bit = SCORPION_VLPM_GROUP0;
	else
		bit = scorpion_get_pmresrn_event(region);
	bit -= scorpion_get_pmresrn_event(0);
	bit += group;
	/*
	 * Lower bits are reserved for use by the counters (see
	 * armv7pmu_get_event_idx() for more info)
	 */
	bit += ARMV7_IDX_COUNTER_LAST(cpu_pmu) + 1;

	return bit;
}
1934
1935
1936
1937
1938
/*
 * We check for column exclusion constraints here: two events can't use
 * the same group within an LPM register.
 */
static int scorpion_pmu_get_event_idx(struct pmu_hw_events *cpuc,
				      struct perf_event *event)
{
	int idx;
	int bit = -1;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	if (venum_event || scorpion_event) {
		/* Ignore invalid events */
		if (group > 3 || region > 3)
			return -EINVAL;

		bit = scorpion_event_to_bit(event, region, group);
		if (test_and_set_bit(bit, cpuc->used_mask))
			return -EAGAIN;
	}

	idx = armv7pmu_get_event_idx(cpuc, event);
	/* Release the LPM claim if no counter could be allocated. */
	if (idx < 0 && bit >= 0)
		clear_bit(bit, cpuc->used_mask);

	return idx;
}
1966
/* Release both the counter index and, for region/VeNum events, the
 * LPM group claim taken in scorpion_pmu_get_event_idx(). */
static void scorpion_pmu_clear_event_idx(struct pmu_hw_events *cpuc,
					 struct perf_event *event)
{
	int bit;
	struct hw_perf_event *hwc = &event->hw;
	unsigned int region = EVENT_REGION(hwc->config_base);
	unsigned int group = EVENT_GROUP(hwc->config_base);
	bool venum_event = EVENT_VENUM(hwc->config_base);
	bool scorpion_event = EVENT_CPU(hwc->config_base);

	armv7pmu_clear_event_idx(cpuc, event);
	if (venum_event || scorpion_event) {
		bit = scorpion_event_to_bit(event, region, group);
		clear_bit(bit, cpuc->used_mask);
	}
}
1983
/* Qualcomm Scorpion (uniprocessor) initialisation. */
static int scorpion_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
1996
/* Qualcomm Scorpion MP initialisation (same hooks, different name). */
static int scorpion_mp_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv7pmu_init(cpu_pmu);
	cpu_pmu->name		= "armv7_scorpion_mp";
	cpu_pmu->map_event	= scorpion_map_event;
	cpu_pmu->reset		= scorpion_pmu_reset;
	cpu_pmu->enable		= scorpion_pmu_enable_event;
	cpu_pmu->disable	= scorpion_pmu_disable_event;
	cpu_pmu->get_event_idx	= scorpion_pmu_get_event_idx;
	cpu_pmu->clear_event_idx = scorpion_pmu_clear_event_idx;
	return armv7_probe_num_events(cpu_pmu);
}
2009
/* Device-tree compatible strings and their per-CPU init functions. */
static const struct of_device_id armv7_pmu_of_device_ids[] = {
	{.compatible = "arm,cortex-a17-pmu",	.data = armv7_a17_pmu_init},
	{.compatible = "arm,cortex-a15-pmu",	.data = armv7_a15_pmu_init},
	{.compatible = "arm,cortex-a12-pmu",	.data = armv7_a12_pmu_init},
	{.compatible = "arm,cortex-a9-pmu",	.data = armv7_a9_pmu_init},
	{.compatible = "arm,cortex-a8-pmu",	.data = armv7_a8_pmu_init},
	{.compatible = "arm,cortex-a7-pmu",	.data = armv7_a7_pmu_init},
	{.compatible = "arm,cortex-a5-pmu",	.data = armv7_a5_pmu_init},
	{.compatible = "qcom,krait-pmu",	.data = krait_pmu_init},
	{.compatible = "qcom,scorpion-pmu",	.data = scorpion_pmu_init},
	{.compatible = "qcom,scorpion-mp-pmu",	.data = scorpion_mp_pmu_init},
	{},
};
2023
/* Fallback CPUID-based probing for non-DT platforms. */
static const struct pmu_probe_info armv7_pmu_probe_table[] = {
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A8, armv7_a8_pmu_init),
	ARM_PMU_PROBE(ARM_CPU_PART_CORTEX_A9, armv7_a9_pmu_init),
	{ /* sentinel value */ }
};
2029
2030
/* Platform-device probe: defer to the generic arm_pmu framework with
 * our DT match table and CPUID probe table. */
static int armv7_pmu_device_probe(struct platform_device *pdev)
{
	return arm_pmu_device_probe(pdev, armv7_pmu_of_device_ids,
				    armv7_pmu_probe_table);
}
2036
/* Built-in platform driver; unbinding via sysfs is suppressed because
 * the PMU cannot be safely torn down at runtime. */
static struct platform_driver armv7_pmu_driver = {
	.driver		= {
		.name	= "armv7-pmu",
		.of_match_table = armv7_pmu_of_device_ids,
		.suppress_bind_attrs = true,
	},
	.probe		= armv7_pmu_device_probe,
};

builtin_platform_driver(armv7_pmu_driver);
2047 #endif