perf_event.c

/*
 * PMU support
 *
 * Copyright (C) 2012 ARM Limited
 * Author: Will Deacon <will.deacon@arm.com>
 *
 * This code is based heavily on the ARMv7 perf event code.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <asm/irq_regs.h>
#include <asm/perf_event.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

#include <linux/acpi.h>
#include <linux/of.h>
#include <linux/perf/arm_pmu.h>
#include <linux/platform_device.h>

/*
 * ARMv8 PMUv3 Performance Events handling code.
 * Common event types (some are defined in asm/perf_event.h).
 */

/* At least one of the following is required. */
#define ARMV8_PMUV3_PERFCTR_INST_RETIRED 0x08
#define ARMV8_PMUV3_PERFCTR_INST_SPEC 0x1B

/* Common architectural events. */
#define ARMV8_PMUV3_PERFCTR_LD_RETIRED 0x06
#define ARMV8_PMUV3_PERFCTR_ST_RETIRED 0x07
#define ARMV8_PMUV3_PERFCTR_EXC_TAKEN 0x09
#define ARMV8_PMUV3_PERFCTR_EXC_RETURN 0x0A
#define ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED 0x0B
#define ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED 0x0C
#define ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED 0x0D
#define ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED 0x0E
#define ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED 0x0F
#define ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED 0x1C
#define ARMV8_PMUV3_PERFCTR_CHAIN 0x1E
#define ARMV8_PMUV3_PERFCTR_BR_RETIRED 0x21

/* Common microarchitectural events. */
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL 0x01
#define ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL 0x02
#define ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL 0x05
#define ARMV8_PMUV3_PERFCTR_MEM_ACCESS 0x13
#define ARMV8_PMUV3_PERFCTR_L1I_CACHE 0x14
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB 0x15
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE 0x16
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL 0x17
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB 0x18
#define ARMV8_PMUV3_PERFCTR_BUS_ACCESS 0x19
#define ARMV8_PMUV3_PERFCTR_MEMORY_ERROR 0x1A
#define ARMV8_PMUV3_PERFCTR_BUS_CYCLES 0x1D
#define ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE 0x1F
#define ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE 0x20
#define ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED 0x22
#define ARMV8_PMUV3_PERFCTR_STALL_FRONTEND 0x23
#define ARMV8_PMUV3_PERFCTR_STALL_BACKEND 0x24
#define ARMV8_PMUV3_PERFCTR_L1D_TLB 0x25
#define ARMV8_PMUV3_PERFCTR_L1I_TLB 0x26
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE 0x27
#define ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL 0x28
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE 0x29
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL 0x2A
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE 0x2B
#define ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB 0x2C
#define ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL 0x2D
#define ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL 0x2E
#define ARMV8_PMUV3_PERFCTR_L2D_TLB 0x2F
#define ARMV8_PMUV3_PERFCTR_L2I_TLB 0x30

/* ARMv8 recommended implementation defined event types */
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD 0x40
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR 0x41
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD 0x42
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR 0x43
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_INNER 0x44
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_OUTER 0x45
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_VICTIM 0x46
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WB_CLEAN 0x47
#define ARMV8_IMPDEF_PERFCTR_L1D_CACHE_INVAL 0x48
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD 0x4C
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR 0x4D
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD 0x4E
#define ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR 0x4F
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_RD 0x50
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WR 0x51
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_RD 0x52
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_REFILL_WR 0x53
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_VICTIM 0x56
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_WB_CLEAN 0x57
#define ARMV8_IMPDEF_PERFCTR_L2D_CACHE_INVAL 0x58
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_RD 0x5C
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_REFILL_WR 0x5D
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_RD 0x5E
#define ARMV8_IMPDEF_PERFCTR_L2D_TLB_WR 0x5F
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD 0x60
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR 0x61
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_SHARED 0x62
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NOT_SHARED 0x63
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_NORMAL 0x64
#define ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_PERIPH 0x65
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_RD 0x66
#define ARMV8_IMPDEF_PERFCTR_MEM_ACCESS_WR 0x67
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LD_SPEC 0x68
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_ST_SPEC 0x69
#define ARMV8_IMPDEF_PERFCTR_UNALIGNED_LDST_SPEC 0x6A
#define ARMV8_IMPDEF_PERFCTR_LDREX_SPEC 0x6C
#define ARMV8_IMPDEF_PERFCTR_STREX_PASS_SPEC 0x6D
#define ARMV8_IMPDEF_PERFCTR_STREX_FAIL_SPEC 0x6E
#define ARMV8_IMPDEF_PERFCTR_STREX_SPEC 0x6F
#define ARMV8_IMPDEF_PERFCTR_LD_SPEC 0x70
#define ARMV8_IMPDEF_PERFCTR_ST_SPEC 0x71
#define ARMV8_IMPDEF_PERFCTR_LDST_SPEC 0x72
#define ARMV8_IMPDEF_PERFCTR_DP_SPEC 0x73
#define ARMV8_IMPDEF_PERFCTR_ASE_SPEC 0x74
#define ARMV8_IMPDEF_PERFCTR_VFP_SPEC 0x75
#define ARMV8_IMPDEF_PERFCTR_PC_WRITE_SPEC 0x76
#define ARMV8_IMPDEF_PERFCTR_CRYPTO_SPEC 0x77
#define ARMV8_IMPDEF_PERFCTR_BR_IMMED_SPEC 0x78
#define ARMV8_IMPDEF_PERFCTR_BR_RETURN_SPEC 0x79
#define ARMV8_IMPDEF_PERFCTR_BR_INDIRECT_SPEC 0x7A
#define ARMV8_IMPDEF_PERFCTR_ISB_SPEC 0x7C
#define ARMV8_IMPDEF_PERFCTR_DSB_SPEC 0x7D
#define ARMV8_IMPDEF_PERFCTR_DMB_SPEC 0x7E
#define ARMV8_IMPDEF_PERFCTR_EXC_UNDEF 0x81
#define ARMV8_IMPDEF_PERFCTR_EXC_SVC 0x82
#define ARMV8_IMPDEF_PERFCTR_EXC_PABORT 0x83
#define ARMV8_IMPDEF_PERFCTR_EXC_DABORT 0x84
#define ARMV8_IMPDEF_PERFCTR_EXC_IRQ 0x86
#define ARMV8_IMPDEF_PERFCTR_EXC_FIQ 0x87
#define ARMV8_IMPDEF_PERFCTR_EXC_SMC 0x88
#define ARMV8_IMPDEF_PERFCTR_EXC_HVC 0x8A
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_PABORT 0x8B
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_DABORT 0x8C
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_OTHER 0x8D
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_IRQ 0x8E
#define ARMV8_IMPDEF_PERFCTR_EXC_TRAP_FIQ 0x8F
#define ARMV8_IMPDEF_PERFCTR_RC_LD_SPEC 0x90
#define ARMV8_IMPDEF_PERFCTR_RC_ST_SPEC 0x91
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_RD 0xA0
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WR 0xA1
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_RD 0xA2
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_REFILL_WR 0xA3
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_VICTIM 0xA6
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_WB_CLEAN 0xA7
#define ARMV8_IMPDEF_PERFCTR_L3D_CACHE_INVAL 0xA8

/* ARMv8 Cortex-A53 specific event types. */
#define ARMV8_A53_PERFCTR_PREF_LINEFILL 0xC2

/* ARMv8 Cavium ThunderX specific event types. */
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST 0xE9
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS 0xEA
#define ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS 0xEB
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS 0xEC
#define ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS 0xED

/* PMUv3 HW events mapping. */

/*
 * ARMv8 architecturally defined events: not all of these may be
 * supported on any given implementation. Events that are not supported
 * will be disabled at run-time.
 */
static const unsigned armv8_pmuv3_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

/* ARM Cortex-A53 HW events mapping. */
static const unsigned armv8_a53_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

/* ARM Cortex-A57 and Cortex-A72 events mapping. */
static const unsigned armv8_a57_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
};

static const unsigned armv8_thunder_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

/* Broadcom Vulcan events mapping */
static const unsigned armv8_vulcan_perf_map[PERF_COUNT_HW_MAX] = {
	PERF_MAP_ALL_UNSUPPORTED,
	[PERF_COUNT_HW_CPU_CYCLES] = ARMV8_PMUV3_PERFCTR_CPU_CYCLES,
	[PERF_COUNT_HW_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_INST_RETIRED,
	[PERF_COUNT_HW_CACHE_REFERENCES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[PERF_COUNT_HW_CACHE_MISSES] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[PERF_COUNT_HW_BRANCH_INSTRUCTIONS] = ARMV8_PMUV3_PERFCTR_BR_RETIRED,
	[PERF_COUNT_HW_BRANCH_MISSES] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[PERF_COUNT_HW_BUS_CYCLES] = ARMV8_PMUV3_PERFCTR_BUS_CYCLES,
	[PERF_COUNT_HW_STALLED_CYCLES_FRONTEND] = ARMV8_PMUV3_PERFCTR_STALL_FRONTEND,
	[PERF_COUNT_HW_STALLED_CYCLES_BACKEND] = ARMV8_PMUV3_PERFCTR_STALL_BACKEND,
};

static const unsigned armv8_pmuv3_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						[PERF_COUNT_HW_CACHE_OP_MAX]
						[PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL,
	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_TLB,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a53_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_A53_PERFCTR_PREF_LINEFILL,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_a57_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
					      [PERF_COUNT_HW_CACHE_OP_MAX]
					      [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_thunder_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						  [PERF_COUNT_HW_CACHE_OP_MAX]
						  [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_MISS_ST,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_ACCESS,
	[C(L1D)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1D_CACHE_PREF_MISS,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_ACCESS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_ACCESS,
	[C(L1I)][C(OP_PREFETCH)][C(RESULT_MISS)] = ARMV8_THUNDER_PERFCTR_L1I_CACHE_PREF_MISS,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
};

static const unsigned armv8_vulcan_perf_cache_map[PERF_COUNT_HW_CACHE_MAX]
						 [PERF_COUNT_HW_CACHE_OP_MAX]
						 [PERF_COUNT_HW_CACHE_RESULT_MAX] = {
	PERF_CACHE_MAP_ALL_UNSUPPORTED,

	[C(L1D)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_RD,
	[C(L1D)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_RD,
	[C(L1D)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_WR,
	[C(L1D)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_CACHE_REFILL_WR,

	[C(L1I)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE,
	[C(L1I)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL,

	[C(ITLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL,
	[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_L1I_TLB,

	[C(DTLB)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_WR,
	[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_RD,
	[C(DTLB)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_IMPDEF_PERFCTR_L1D_TLB_REFILL_WR,

	[C(BPU)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_READ)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_PMUV3_PERFCTR_BR_PRED,
	[C(BPU)][C(OP_WRITE)][C(RESULT_MISS)] = ARMV8_PMUV3_PERFCTR_BR_MIS_PRED,

	[C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_RD,
	[C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = ARMV8_IMPDEF_PERFCTR_BUS_ACCESS_WR,
};
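
/*
 * Note: the cache maps above are indexed as [cache][operation][result].
 * Anything not listed explicitly is left as CACHE_OP_UNSUPPORTED by the
 * PERF_CACHE_MAP_ALL_UNSUPPORTED initialiser and is rejected when the
 * event is mapped.
 */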

static ssize_t
armv8pmu_events_sysfs_show(struct device *dev,
			   struct device_attribute *attr, char *page)
{
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr);

	return sprintf(page, "event=0x%03llx\n", pmu_attr->id);
}

#define ARMV8_EVENT_ATTR_RESOLVE(m) #m
#define ARMV8_EVENT_ATTR(name, config) \
	PMU_EVENT_ATTR(name, armv8_event_attr_##name, \
		       config, armv8pmu_events_sysfs_show)

ARMV8_EVENT_ATTR(sw_incr, ARMV8_PMUV3_PERFCTR_SW_INCR);
ARMV8_EVENT_ATTR(l1i_cache_refill, ARMV8_PMUV3_PERFCTR_L1I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1i_tlb_refill, ARMV8_PMUV3_PERFCTR_L1I_TLB_REFILL);
ARMV8_EVENT_ATTR(l1d_cache_refill, ARMV8_PMUV3_PERFCTR_L1D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l1d_cache, ARMV8_PMUV3_PERFCTR_L1D_CACHE);
ARMV8_EVENT_ATTR(l1d_tlb_refill, ARMV8_PMUV3_PERFCTR_L1D_TLB_REFILL);
ARMV8_EVENT_ATTR(ld_retired, ARMV8_PMUV3_PERFCTR_LD_RETIRED);
ARMV8_EVENT_ATTR(st_retired, ARMV8_PMUV3_PERFCTR_ST_RETIRED);
ARMV8_EVENT_ATTR(inst_retired, ARMV8_PMUV3_PERFCTR_INST_RETIRED);
ARMV8_EVENT_ATTR(exc_taken, ARMV8_PMUV3_PERFCTR_EXC_TAKEN);
ARMV8_EVENT_ATTR(exc_return, ARMV8_PMUV3_PERFCTR_EXC_RETURN);
ARMV8_EVENT_ATTR(cid_write_retired, ARMV8_PMUV3_PERFCTR_CID_WRITE_RETIRED);
ARMV8_EVENT_ATTR(pc_write_retired, ARMV8_PMUV3_PERFCTR_PC_WRITE_RETIRED);
ARMV8_EVENT_ATTR(br_immed_retired, ARMV8_PMUV3_PERFCTR_BR_IMMED_RETIRED);
ARMV8_EVENT_ATTR(br_return_retired, ARMV8_PMUV3_PERFCTR_BR_RETURN_RETIRED);
ARMV8_EVENT_ATTR(unaligned_ldst_retired, ARMV8_PMUV3_PERFCTR_UNALIGNED_LDST_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED);
ARMV8_EVENT_ATTR(cpu_cycles, ARMV8_PMUV3_PERFCTR_CPU_CYCLES);
ARMV8_EVENT_ATTR(br_pred, ARMV8_PMUV3_PERFCTR_BR_PRED);
ARMV8_EVENT_ATTR(mem_access, ARMV8_PMUV3_PERFCTR_MEM_ACCESS);
ARMV8_EVENT_ATTR(l1i_cache, ARMV8_PMUV3_PERFCTR_L1I_CACHE);
ARMV8_EVENT_ATTR(l1d_cache_wb, ARMV8_PMUV3_PERFCTR_L1D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_cache, ARMV8_PMUV3_PERFCTR_L2D_CACHE);
ARMV8_EVENT_ATTR(l2d_cache_refill, ARMV8_PMUV3_PERFCTR_L2D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l2d_cache_wb, ARMV8_PMUV3_PERFCTR_L2D_CACHE_WB);
ARMV8_EVENT_ATTR(bus_access, ARMV8_PMUV3_PERFCTR_BUS_ACCESS);
ARMV8_EVENT_ATTR(memory_error, ARMV8_PMUV3_PERFCTR_MEMORY_ERROR);
ARMV8_EVENT_ATTR(inst_spec, ARMV8_PMUV3_PERFCTR_INST_SPEC);
ARMV8_EVENT_ATTR(ttbr_write_retired, ARMV8_PMUV3_PERFCTR_TTBR_WRITE_RETIRED);
ARMV8_EVENT_ATTR(bus_cycles, ARMV8_PMUV3_PERFCTR_BUS_CYCLES);
/* Don't expose the chain event in /sys, since it's useless in isolation */
ARMV8_EVENT_ATTR(l1d_cache_allocate, ARMV8_PMUV3_PERFCTR_L1D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l2d_cache_allocate, ARMV8_PMUV3_PERFCTR_L2D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(br_retired, ARMV8_PMUV3_PERFCTR_BR_RETIRED);
ARMV8_EVENT_ATTR(br_mis_pred_retired, ARMV8_PMUV3_PERFCTR_BR_MIS_PRED_RETIRED);
ARMV8_EVENT_ATTR(stall_frontend, ARMV8_PMUV3_PERFCTR_STALL_FRONTEND);
ARMV8_EVENT_ATTR(stall_backend, ARMV8_PMUV3_PERFCTR_STALL_BACKEND);
ARMV8_EVENT_ATTR(l1d_tlb, ARMV8_PMUV3_PERFCTR_L1D_TLB);
ARMV8_EVENT_ATTR(l1i_tlb, ARMV8_PMUV3_PERFCTR_L1I_TLB);
ARMV8_EVENT_ATTR(l2i_cache, ARMV8_PMUV3_PERFCTR_L2I_CACHE);
ARMV8_EVENT_ATTR(l2i_cache_refill, ARMV8_PMUV3_PERFCTR_L2I_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache_allocate, ARMV8_PMUV3_PERFCTR_L3D_CACHE_ALLOCATE);
ARMV8_EVENT_ATTR(l3d_cache_refill, ARMV8_PMUV3_PERFCTR_L3D_CACHE_REFILL);
ARMV8_EVENT_ATTR(l3d_cache, ARMV8_PMUV3_PERFCTR_L3D_CACHE);
ARMV8_EVENT_ATTR(l3d_cache_wb, ARMV8_PMUV3_PERFCTR_L3D_CACHE_WB);
ARMV8_EVENT_ATTR(l2d_tlb_refill, ARMV8_PMUV3_PERFCTR_L2D_TLB_REFILL);
ARMV8_EVENT_ATTR(l2i_tlb_refill, ARMV8_PMUV3_PERFCTR_L2I_TLB_REFILL);
ARMV8_EVENT_ATTR(l2d_tlb, ARMV8_PMUV3_PERFCTR_L2D_TLB);
ARMV8_EVENT_ATTR(l2i_tlb, ARMV8_PMUV3_PERFCTR_L2I_TLB);

static struct attribute *armv8_pmuv3_event_attrs[] = {
	&armv8_event_attr_sw_incr.attr.attr,
	&armv8_event_attr_l1i_cache_refill.attr.attr,
	&armv8_event_attr_l1i_tlb_refill.attr.attr,
	&armv8_event_attr_l1d_cache_refill.attr.attr,
	&armv8_event_attr_l1d_cache.attr.attr,
	&armv8_event_attr_l1d_tlb_refill.attr.attr,
	&armv8_event_attr_ld_retired.attr.attr,
	&armv8_event_attr_st_retired.attr.attr,
	&armv8_event_attr_inst_retired.attr.attr,
	&armv8_event_attr_exc_taken.attr.attr,
	&armv8_event_attr_exc_return.attr.attr,
	&armv8_event_attr_cid_write_retired.attr.attr,
	&armv8_event_attr_pc_write_retired.attr.attr,
	&armv8_event_attr_br_immed_retired.attr.attr,
	&armv8_event_attr_br_return_retired.attr.attr,
	&armv8_event_attr_unaligned_ldst_retired.attr.attr,
	&armv8_event_attr_br_mis_pred.attr.attr,
	&armv8_event_attr_cpu_cycles.attr.attr,
	&armv8_event_attr_br_pred.attr.attr,
	&armv8_event_attr_mem_access.attr.attr,
	&armv8_event_attr_l1i_cache.attr.attr,
	&armv8_event_attr_l1d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_cache.attr.attr,
	&armv8_event_attr_l2d_cache_refill.attr.attr,
	&armv8_event_attr_l2d_cache_wb.attr.attr,
	&armv8_event_attr_bus_access.attr.attr,
	&armv8_event_attr_memory_error.attr.attr,
	&armv8_event_attr_inst_spec.attr.attr,
	&armv8_event_attr_ttbr_write_retired.attr.attr,
	&armv8_event_attr_bus_cycles.attr.attr,
	&armv8_event_attr_l1d_cache_allocate.attr.attr,
	&armv8_event_attr_l2d_cache_allocate.attr.attr,
	&armv8_event_attr_br_retired.attr.attr,
	&armv8_event_attr_br_mis_pred_retired.attr.attr,
	&armv8_event_attr_stall_frontend.attr.attr,
	&armv8_event_attr_stall_backend.attr.attr,
	&armv8_event_attr_l1d_tlb.attr.attr,
	&armv8_event_attr_l1i_tlb.attr.attr,
	&armv8_event_attr_l2i_cache.attr.attr,
	&armv8_event_attr_l2i_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache_allocate.attr.attr,
	&armv8_event_attr_l3d_cache_refill.attr.attr,
	&armv8_event_attr_l3d_cache.attr.attr,
	&armv8_event_attr_l3d_cache_wb.attr.attr,
	&armv8_event_attr_l2d_tlb_refill.attr.attr,
	&armv8_event_attr_l2i_tlb_refill.attr.attr,
	&armv8_event_attr_l2d_tlb.attr.attr,
	&armv8_event_attr_l2i_tlb.attr.attr,
	NULL,
};

static umode_t
armv8pmu_event_attr_is_visible(struct kobject *kobj,
			       struct attribute *attr, int unused)
{
	struct device *dev = kobj_to_dev(kobj);
	struct pmu *pmu = dev_get_drvdata(dev);
	struct arm_pmu *cpu_pmu = container_of(pmu, struct arm_pmu, pmu);
	struct perf_pmu_events_attr *pmu_attr;

	pmu_attr = container_of(attr, struct perf_pmu_events_attr, attr.attr);

	if (test_bit(pmu_attr->id, cpu_pmu->pmceid_bitmap))
		return attr->mode;

	return 0;
}

static struct attribute_group armv8_pmuv3_events_attr_group = {
	.name = "events",
	.attrs = armv8_pmuv3_event_attrs,
	.is_visible = armv8pmu_event_attr_is_visible,
};

PMU_FORMAT_ATTR(event, "config:0-9");

static struct attribute *armv8_pmuv3_format_attrs[] = {
	&format_attr_event.attr,
	NULL,
};

static struct attribute_group armv8_pmuv3_format_attr_group = {
	.name = "format",
	.attrs = armv8_pmuv3_format_attrs,
};
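
/*
 * The "event" format field above exposes the 10-bit event number that is
 * programmed into PMXEVTYPER_EL0.evtCount. As a usage sketch (assuming the
 * generic "armv8_pmuv3" PMU name registered below), raw event 0x11
 * (ARMV8_PMUV3_PERFCTR_CPU_CYCLES) can be requested with:
 *
 *   perf stat -e armv8_pmuv3/event=0x11/ -- sleep 1
 */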

/*
 * Perf Events' indices
 */
#define ARMV8_IDX_CYCLE_COUNTER 0
#define ARMV8_IDX_COUNTER0 1
#define ARMV8_IDX_COUNTER_LAST(cpu_pmu) \
	(ARMV8_IDX_CYCLE_COUNTER + cpu_pmu->num_events - 1)
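
/*
 * Index 0 is the dedicated 64-bit cycle counter (PMCCNTR_EL0); indices
 * 1..N map onto the N programmable event counters, which is why
 * ARMV8_IDX_TO_COUNTER() below subtracts one to obtain the hardware
 * counter number.
 */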

/*
 * ARMv8 low level PMU access
 */

/*
 * Perf Event to low level counters mapping
 */
#define ARMV8_IDX_TO_COUNTER(x) \
	(((x) - ARMV8_IDX_COUNTER0) & ARMV8_PMU_COUNTER_MASK)

static inline u32 armv8pmu_pmcr_read(void)
{
	return read_sysreg(pmcr_el0);
}

static inline void armv8pmu_pmcr_write(u32 val)
{
	val &= ARMV8_PMU_PMCR_MASK;
	isb();
	write_sysreg(val, pmcr_el0);
}

static inline int armv8pmu_has_overflowed(u32 pmovsr)
{
	return pmovsr & ARMV8_PMU_OVERFLOWED_MASK;
}

static inline int armv8pmu_counter_valid(struct arm_pmu *cpu_pmu, int idx)
{
	return idx >= ARMV8_IDX_CYCLE_COUNTER &&
		idx <= ARMV8_IDX_COUNTER_LAST(cpu_pmu);
}

static inline int armv8pmu_counter_has_overflowed(u32 pmnc, int idx)
{
	return pmnc & BIT(ARMV8_IDX_TO_COUNTER(idx));
}
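
/*
 * The event counters are accessed indirectly: writing a counter number to
 * PMSELR_EL0 selects the counter that the PMXEVCNTR_EL0 and PMXEVTYPER_EL0
 * accessors below operate on, and the isb() ensures the selection takes
 * effect before the subsequent register access.
 */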

static inline int armv8pmu_select_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	write_sysreg(counter, pmselr_el0);
	isb();

	return idx;
}

static inline u32 armv8pmu_read_counter(struct perf_event *event)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;
	u32 value = 0;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		value = read_sysreg(pmccntr_el0);
	else if (armv8pmu_select_counter(idx) == idx)
		value = read_sysreg(pmxevcntr_el0);

	return value;
}

static inline void armv8pmu_write_counter(struct perf_event *event, u32 value)
{
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	int idx = hwc->idx;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u writing wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
		/*
		 * The cycle counter is 64 bits wide, but we only count
		 * with the lower 32 bits: set the upper 32 bits so that
		 * the overflow interrupt fires when bit 31 wraps.
		 */
		u64 value64 = 0xffffffff00000000ULL | value;

		write_sysreg(value64, pmccntr_el0);
	} else if (armv8pmu_select_counter(idx) == idx)
		write_sysreg(value, pmxevcntr_el0);
}

static inline void armv8pmu_write_evtype(int idx, u32 val)
{
	if (armv8pmu_select_counter(idx) == idx) {
		val &= ARMV8_PMU_EVTYPE_MASK;
		write_sysreg(val, pmxevtyper_el0);
	}
}

static inline int armv8pmu_enable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	write_sysreg(BIT(counter), pmcntenset_el0);

	return idx;
}

static inline int armv8pmu_disable_counter(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	write_sysreg(BIT(counter), pmcntenclr_el0);

	return idx;
}

static inline int armv8pmu_enable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	write_sysreg(BIT(counter), pmintenset_el1);

	return idx;
}

static inline int armv8pmu_disable_intens(int idx)
{
	u32 counter = ARMV8_IDX_TO_COUNTER(idx);

	write_sysreg(BIT(counter), pmintenclr_el1);
	isb();
	/* Clear the overflow flag in case an interrupt is pending. */
	write_sysreg(BIT(counter), pmovsclr_el0);
	isb();

	return idx;
}

static inline u32 armv8pmu_getreset_flags(void)
{
	u32 value;

	/* Read */
	value = read_sysreg(pmovsclr_el0);

	/* Write to clear flags */
	value &= ARMV8_PMU_OVSR_MASK;
	write_sysreg(value, pmovsclr_el0);

	return value;
}

static void armv8pmu_enable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Enable counter and interrupt, and set the counter to count
	 * the event that we're interested in.
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Set event (if destined for PMNx counters).
	 */
	armv8pmu_write_evtype(idx, hwc->config_base);

	/*
	 * Enable interrupt for this counter
	 */
	armv8pmu_enable_intens(idx);

	/*
	 * Enable counter
	 */
	armv8pmu_enable_counter(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_disable_event(struct perf_event *event)
{
	unsigned long flags;
	struct hw_perf_event *hwc = &event->hw;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);
	int idx = hwc->idx;

	/*
	 * Disable counter and interrupt
	 */
	raw_spin_lock_irqsave(&events->pmu_lock, flags);

	/*
	 * Disable counter
	 */
	armv8pmu_disable_counter(idx);

	/*
	 * Disable interrupt for this counter
	 */
	armv8pmu_disable_intens(idx);

	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static irqreturn_t armv8pmu_handle_irq(int irq_num, void *dev)
{
	u32 pmovsr;
	struct perf_sample_data data;
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)dev;
	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
	struct pt_regs *regs;
	int idx;

	/*
	 * Get and reset the IRQ flags
	 */
	pmovsr = armv8pmu_getreset_flags();

	/*
	 * Did an overflow occur?
	 */
	if (!armv8pmu_has_overflowed(pmovsr))
		return IRQ_NONE;

	/*
	 * Handle the counter(s) overflow(s)
	 */
	regs = get_irq_regs();

	for (idx = 0; idx < cpu_pmu->num_events; ++idx) {
		struct perf_event *event = cpuc->events[idx];
		struct hw_perf_event *hwc;

		/* Ignore if we don't have an event. */
		if (!event)
			continue;

		/*
		 * We have a single interrupt for all counters. Check that
		 * each counter has overflowed before we process it.
		 */
		if (!armv8pmu_counter_has_overflowed(pmovsr, idx))
			continue;

		hwc = &event->hw;
		armpmu_event_update(event);
		perf_sample_data_init(&data, 0, hwc->last_period);
		if (!armpmu_event_set_period(event))
			continue;

		if (perf_event_overflow(event, &data, regs))
			cpu_pmu->disable(event);
	}

	/*
	 * Handle the pending perf events.
	 *
	 * Note: this call *must* be run with interrupts disabled. For
	 * platforms that can have the PMU interrupts raised as an NMI, this
	 * will not work.
	 */
	irq_work_run();

	return IRQ_HANDLED;
}

static void armv8pmu_start(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Enable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() | ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static void armv8pmu_stop(struct arm_pmu *cpu_pmu)
{
	unsigned long flags;
	struct pmu_hw_events *events = this_cpu_ptr(cpu_pmu->hw_events);

	raw_spin_lock_irqsave(&events->pmu_lock, flags);
	/* Disable all counters */
	armv8pmu_pmcr_write(armv8pmu_pmcr_read() & ~ARMV8_PMU_PMCR_E);
	raw_spin_unlock_irqrestore(&events->pmu_lock, flags);
}

static int armv8pmu_get_event_idx(struct pmu_hw_events *cpuc,
				  struct perf_event *event)
{
	int idx;
	struct arm_pmu *cpu_pmu = to_arm_pmu(event->pmu);
	struct hw_perf_event *hwc = &event->hw;
	unsigned long evtype = hwc->config_base & ARMV8_PMU_EVTYPE_EVENT;

	/* Always place a cycle counter into the cycle counter. */
	if (evtype == ARMV8_PMUV3_PERFCTR_CPU_CYCLES) {
		if (test_and_set_bit(ARMV8_IDX_CYCLE_COUNTER, cpuc->used_mask))
			return -EAGAIN;

		return ARMV8_IDX_CYCLE_COUNTER;
	}

	/*
	 * For anything other than a cycle counter, try and use
	 * the event counters.
	 */
	for (idx = ARMV8_IDX_COUNTER0; idx < cpu_pmu->num_events; ++idx) {
		if (!test_and_set_bit(idx, cpuc->used_mask))
			return idx;
	}

	/* The counters are all in use. */
	return -EAGAIN;
}

/*
 * Add an event filter to a given event. This will only work for PMUv2 PMUs.
 */
static int armv8pmu_set_event_filter(struct hw_perf_event *event,
				     struct perf_event_attr *attr)
{
	unsigned long config_base = 0;

	if (attr->exclude_idle)
		return -EPERM;

	/*
	 * If we're running in hyp mode, then we *are* the hypervisor.
	 * Therefore we ignore exclude_hv in this configuration, since
	 * there's no hypervisor to sample anyway. This is consistent
	 * with other architectures (x86 and Power).
	 */
	if (is_kernel_in_hyp_mode()) {
		if (!attr->exclude_kernel)
			config_base |= ARMV8_PMU_INCLUDE_EL2;
	} else {
		if (attr->exclude_kernel)
			config_base |= ARMV8_PMU_EXCLUDE_EL1;
		if (!attr->exclude_hv)
			config_base |= ARMV8_PMU_INCLUDE_EL2;
	}
	if (attr->exclude_user)
		config_base |= ARMV8_PMU_EXCLUDE_EL0;

	/*
	 * Install the filter into config_base as this is used to
	 * construct the event type.
	 */
	event->config_base = config_base;

	return 0;
}
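
/*
 * The filter bits land in PMXEVTYPER_EL0 along with the event number when
 * the event is programmed. For example, "perf stat -e cycles:u" sets
 * attr.exclude_kernel, so ARMV8_PMU_EXCLUDE_EL1 is set here and the counter
 * only advances while executing at EL0.
 */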

static void armv8pmu_reset(void *info)
{
	struct arm_pmu *cpu_pmu = (struct arm_pmu *)info;
	u32 idx, nb_cnt = cpu_pmu->num_events;

	/* The counter and interrupt enable registers are unknown at reset. */
	for (idx = ARMV8_IDX_CYCLE_COUNTER; idx < nb_cnt; ++idx) {
		armv8pmu_disable_counter(idx);
		armv8pmu_disable_intens(idx);
	}

	/*
	 * Initialize & Reset PMNC. Request overflow interrupt for
	 * 64 bit cycle counter but cheat in armv8pmu_write_counter().
	 */
	armv8pmu_pmcr_write(ARMV8_PMU_PMCR_P | ARMV8_PMU_PMCR_C |
			    ARMV8_PMU_PMCR_LC);
}

static int armv8_pmuv3_map_event(struct perf_event *event)
{
	int hw_event_id;
	struct arm_pmu *armpmu = to_arm_pmu(event->pmu);

	hw_event_id = armpmu_map_event(event, &armv8_pmuv3_perf_map,
				       &armv8_pmuv3_perf_cache_map,
				       ARMV8_PMU_EVTYPE_EVENT);
	if (hw_event_id < 0)
		return hw_event_id;

	/* disable micro/arch events not supported by this PMU */
	if ((hw_event_id < ARMV8_PMUV3_MAX_COMMON_EVENTS) &&
	    !test_bit(hw_event_id, armpmu->pmceid_bitmap)) {
		return -EOPNOTSUPP;
	}

	return hw_event_id;
}

static int armv8_a53_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a53_perf_map,
				&armv8_a53_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_a57_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_a57_perf_map,
				&armv8_a57_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_thunder_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_thunder_perf_map,
				&armv8_thunder_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static int armv8_vulcan_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv8_vulcan_perf_map,
				&armv8_vulcan_perf_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}

static void __armv8pmu_probe_pmu(void *info)
{
	struct arm_pmu *cpu_pmu = info;
	u32 pmceid[2];

	/* Read the nb of CNTx counters supported from PMNC */
	cpu_pmu->num_events = (armv8pmu_pmcr_read() >> ARMV8_PMU_PMCR_N_SHIFT)
		& ARMV8_PMU_PMCR_N_MASK;

	/* Add the CPU cycles counter */
	cpu_pmu->num_events += 1;

	pmceid[0] = read_sysreg(pmceid0_el0);
	pmceid[1] = read_sysreg(pmceid1_el0);

	bitmap_from_u32array(cpu_pmu->pmceid_bitmap,
			     ARMV8_PMUV3_MAX_COMMON_EVENTS, pmceid,
			     ARRAY_SIZE(pmceid));
}
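
/*
 * PMCEID0_EL0 and PMCEID1_EL0 advertise which of the first 64 common events
 * the implementation supports. The resulting bitmap drives both sysfs event
 * visibility (armv8pmu_event_attr_is_visible) and the rejection of
 * unsupported common events in armv8_pmuv3_map_event().
 */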

static int armv8pmu_probe_pmu(struct arm_pmu *cpu_pmu)
{
	return smp_call_function_any(&cpu_pmu->supported_cpus,
				     __armv8pmu_probe_pmu,
				     cpu_pmu, 1);
}

static void armv8_pmu_init(struct arm_pmu *cpu_pmu)
{
	cpu_pmu->handle_irq = armv8pmu_handle_irq;
	cpu_pmu->enable = armv8pmu_enable_event;
	cpu_pmu->disable = armv8pmu_disable_event;
	cpu_pmu->read_counter = armv8pmu_read_counter;
	cpu_pmu->write_counter = armv8pmu_write_counter;
	cpu_pmu->get_event_idx = armv8pmu_get_event_idx;
	cpu_pmu->start = armv8pmu_start;
	cpu_pmu->stop = armv8pmu_stop;
	cpu_pmu->reset = armv8pmu_reset;
	cpu_pmu->max_period = (1LLU << 32) - 1;
	cpu_pmu->set_event_filter = armv8pmu_set_event_filter;
}

static int armv8_pmuv3_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_pmuv3";
	cpu_pmu->map_event = armv8_pmuv3_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a53_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a53";
	cpu_pmu->map_event = armv8_a53_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a57_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a57";
	cpu_pmu->map_event = armv8_a57_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_a72_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cortex_a72";
	cpu_pmu->map_event = armv8_a57_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_thunder_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_cavium_thunder";
	cpu_pmu->map_event = armv8_thunder_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static int armv8_vulcan_pmu_init(struct arm_pmu *cpu_pmu)
{
	armv8_pmu_init(cpu_pmu);
	cpu_pmu->name = "armv8_brcm_vulcan";
	cpu_pmu->map_event = armv8_vulcan_map_event;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_EVENTS] =
		&armv8_pmuv3_events_attr_group;
	cpu_pmu->attr_groups[ARMPMU_ATTR_GROUP_FORMATS] =
		&armv8_pmuv3_format_attr_group;
	return armv8pmu_probe_pmu(cpu_pmu);
}

static const struct of_device_id armv8_pmu_of_device_ids[] = {
	{.compatible = "arm,armv8-pmuv3", .data = armv8_pmuv3_init},
	{.compatible = "arm,cortex-a53-pmu", .data = armv8_a53_pmu_init},
	{.compatible = "arm,cortex-a57-pmu", .data = armv8_a57_pmu_init},
	{.compatible = "arm,cortex-a72-pmu", .data = armv8_a72_pmu_init},
	{.compatible = "cavium,thunder-pmu", .data = armv8_thunder_pmu_init},
	{.compatible = "brcm,vulcan-pmu", .data = armv8_vulcan_pmu_init},
	{},
};

/*
 * Non-DT systems have their micro/arch events probed at run-time.
 * A fairly complete list of generic events is provided, and events that
 * aren't supported by the current PMU are disabled.
 */
static const struct pmu_probe_info armv8_pmu_probe_table[] = {
	PMU_PROBE(0, 0, armv8_pmuv3_init), /* enable all defined counters */
	{ /* sentinel value */ }
};

static int armv8_pmu_device_probe(struct platform_device *pdev)
{
	if (acpi_disabled)
		return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
					    NULL);

	return arm_pmu_device_probe(pdev, armv8_pmu_of_device_ids,
				    armv8_pmu_probe_table);
}

static struct platform_driver armv8_pmu_driver = {
	.driver = {
		.name = ARMV8_PMU_PDEV_NAME,
		.of_match_table = armv8_pmu_of_device_ids,
	},
	.probe = armv8_pmu_device_probe,
};

builtin_platform_driver(armv8_pmu_driver);
  957. builtin_platform_driver(armv8_pmu_driver);