#include <linux/module.h>
#include <linux/slab.h>
#include <linux/pci.h>
#include <linux/perf_event.h>
#include "perf_event.h"

#define UNCORE_PMU_NAME_LEN 32
#define UNCORE_PMU_HRTIMER_INTERVAL (60LL * NSEC_PER_SEC)

#define UNCORE_FIXED_EVENT 0xff
#define UNCORE_PMC_IDX_MAX_GENERIC 8
#define UNCORE_PMC_IDX_FIXED UNCORE_PMC_IDX_MAX_GENERIC
#define UNCORE_PMC_IDX_MAX (UNCORE_PMC_IDX_FIXED + 1)

#define UNCORE_EVENT_CONSTRAINT(c, n) EVENT_CONSTRAINT(c, n, 0xff)

/* SNB event control */
#define SNB_UNC_CTL_EV_SEL_MASK 0x000000ff
#define SNB_UNC_CTL_UMASK_MASK 0x0000ff00
#define SNB_UNC_CTL_EDGE_DET (1 << 18)
#define SNB_UNC_CTL_EN (1 << 22)
#define SNB_UNC_CTL_INVERT (1 << 23)
#define SNB_UNC_CTL_CMASK_MASK 0x1f000000
#define NHM_UNC_CTL_CMASK_MASK 0xff000000
#define NHM_UNC_FIXED_CTR_CTL_EN (1 << 0)

#define SNB_UNC_RAW_EVENT_MASK (SNB_UNC_CTL_EV_SEL_MASK | \
                                SNB_UNC_CTL_UMASK_MASK | \
                                SNB_UNC_CTL_EDGE_DET | \
                                SNB_UNC_CTL_INVERT | \
                                SNB_UNC_CTL_CMASK_MASK)

#define NHM_UNC_RAW_EVENT_MASK (SNB_UNC_CTL_EV_SEL_MASK | \
                                SNB_UNC_CTL_UMASK_MASK | \
                                SNB_UNC_CTL_EDGE_DET | \
                                SNB_UNC_CTL_INVERT | \
                                NHM_UNC_CTL_CMASK_MASK)

/* SNB global control register */
#define SNB_UNC_PERF_GLOBAL_CTL 0x391
#define SNB_UNC_FIXED_CTR_CTRL 0x394
#define SNB_UNC_FIXED_CTR 0x395

/* SNB uncore global control */
#define SNB_UNC_GLOBAL_CTL_CORE_ALL ((1 << 4) - 1)
#define SNB_UNC_GLOBAL_CTL_EN (1 << 29)

/* SNB Cbo register */
#define SNB_UNC_CBO_0_PERFEVTSEL0 0x700
#define SNB_UNC_CBO_0_PER_CTR0 0x706
#define SNB_UNC_CBO_MSR_OFFSET 0x10

/* NHM global control register */
#define NHM_UNC_PERF_GLOBAL_CTL 0x391
#define NHM_UNC_FIXED_CTR 0x394
#define NHM_UNC_FIXED_CTR_CTRL 0x395

/* NHM uncore global control */
#define NHM_UNC_GLOBAL_CTL_EN_PC_ALL ((1ULL << 8) - 1)
#define NHM_UNC_GLOBAL_CTL_EN_FC (1ULL << 32)

/* NHM uncore register */
#define NHM_UNC_PERFEVTSEL0 0x3c0
#define NHM_UNC_UNCORE_PMC0 0x3b0

/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL (1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS (1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ (1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN (1 << 16)
#define SNBEP_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \
                                SNBEP_PMON_BOX_CTL_RST_CTRS | \
                                SNBEP_PMON_BOX_CTL_FRZ_EN)
/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK 0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK 0x0000ff00
#define SNBEP_PMON_CTL_RST (1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET (1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT (1 << 21)
#define SNBEP_PMON_CTL_EN (1 << 22)
#define SNBEP_PMON_CTL_INVERT (1 << 23)
#define SNBEP_PMON_CTL_TRESH_MASK 0xff000000
#define SNBEP_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                   SNBEP_PMON_CTL_UMASK_MASK | \
                                   SNBEP_PMON_CTL_EDGE_DET | \
                                   SNBEP_PMON_CTL_INVERT | \
                                   SNBEP_PMON_CTL_TRESH_MASK)

/* SNB-EP Ubox event control */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK 0x1f000000
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_UMASK_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PMON_CTL_INVERT | \
                                 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

#define SNBEP_CBO_PMON_CTL_TID_EN (1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK (SNBEP_PMON_RAW_EVENT_MASK | \
                                           SNBEP_CBO_PMON_CTL_TID_EN)

/* SNB-EP PCU event control */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK 0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK 0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT (1 << 30)
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET (1 << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PMON_CTL_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_RAW_EVENT_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT)

/* SNB-EP pci control register */
#define SNBEP_PCI_PMON_BOX_CTL 0xf4
#define SNBEP_PCI_PMON_CTL0 0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0 0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0 0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1 0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH 0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL 0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR 0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0 0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1 0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0 0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1 0x23c

/* SNB-EP Ubox register */
#define SNBEP_U_MSR_PMON_CTR0 0xc16
#define SNBEP_U_MSR_PMON_CTL0 0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL 0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR 0xc09

/* SNB-EP Cbo register */
#define SNBEP_C0_MSR_PMON_CTR0 0xd16
#define SNBEP_C0_MSR_PMON_CTL0 0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL 0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER 0xd14
#define SNBEP_CBO_MSR_OFFSET 0x20

#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID 0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID 0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE 0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC 0xff800000

#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) { \
        .event = (e), \
        .msr = SNBEP_C0_MSR_PMON_BOX_FILTER, \
        .config_mask = (m), \
        .idx = (i) \
        }
/* SNB-EP PCU register */
#define SNBEP_PCU_MSR_PMON_CTR0 0xc36
#define SNBEP_PCU_MSR_PMON_CTL0 0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL 0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER 0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK 0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR 0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR 0x3fd

/* IVT event control */
#define IVT_PMON_BOX_CTL_INT (SNBEP_PMON_BOX_CTL_RST_CTRL | \
                              SNBEP_PMON_BOX_CTL_RST_CTRS)
#define IVT_PMON_RAW_EVENT_MASK (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_UMASK_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PMON_CTL_TRESH_MASK)

#define IVT_U_MSR_PMON_GLOBAL_CTL 0xc00
#define IVT_U_PMON_GLOBAL_FRZ_ALL (1 << 31)
#define IVT_U_PMON_GLOBAL_UNFRZ_ALL (1 << 29)

#define IVT_U_MSR_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_UMASK_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

#define IVT_CBO_MSR_PMON_RAW_EVENT_MASK (IVT_PMON_RAW_EVENT_MASK | \
                                         SNBEP_CBO_PMON_CTL_TID_EN)

#define IVT_CB0_MSR_PMON_BOX_FILTER_TID (0x1fULL << 0)
#define IVT_CB0_MSR_PMON_BOX_FILTER_LINK (0xfULL << 5)
#define IVT_CB0_MSR_PMON_BOX_FILTER_STATE (0x3fULL << 17)
#define IVT_CB0_MSR_PMON_BOX_FILTER_NID (0xffffULL << 32)
#define IVT_CB0_MSR_PMON_BOX_FILTER_OPC (0x1ffULL << 52)
#define IVT_CB0_MSR_PMON_BOX_FILTER_C6 (0x1ULL << 61)
#define IVT_CB0_MSR_PMON_BOX_FILTER_NC (0x1ULL << 62)
#define IVT_CB0_MSR_PMON_BOX_FILTER_IOSC (0x1ULL << 63)
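
/* IVT home agent */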
#define IVT_HA_PCI_PMON_CTL_Q_OCC_RST (1 << 16)
#define IVT_HA_PCI_PMON_RAW_EVENT_MASK \
                                (IVT_PMON_RAW_EVENT_MASK | \
                                 IVT_HA_PCI_PMON_CTL_Q_OCC_RST)

#define IVT_PCU_MSR_PMON_RAW_EVENT_MASK \
                                (SNBEP_PMON_CTL_EV_SEL_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
                                 SNBEP_PMON_CTL_EDGE_DET | \
                                 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
                                 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

#define IVT_QPI_PCI_PMON_RAW_EVENT_MASK \
                                (IVT_PMON_RAW_EVENT_MASK | \
                                 SNBEP_PMON_CTL_EV_SEL_EXT)

/* NHM-EX event control */
#define NHMEX_PMON_CTL_EV_SEL_MASK 0x000000ff
#define NHMEX_PMON_CTL_UMASK_MASK 0x0000ff00
#define NHMEX_PMON_CTL_EN_BIT0 (1 << 0)
#define NHMEX_PMON_CTL_EDGE_DET (1 << 18)
#define NHMEX_PMON_CTL_PMI_EN (1 << 20)
#define NHMEX_PMON_CTL_EN_BIT22 (1 << 22)
#define NHMEX_PMON_CTL_INVERT (1 << 23)
#define NHMEX_PMON_CTL_TRESH_MASK 0xff000000
#define NHMEX_PMON_RAW_EVENT_MASK (NHMEX_PMON_CTL_EV_SEL_MASK | \
                                   NHMEX_PMON_CTL_UMASK_MASK | \
                                   NHMEX_PMON_CTL_EDGE_DET | \
                                   NHMEX_PMON_CTL_INVERT | \
                                   NHMEX_PMON_CTL_TRESH_MASK)
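
/* NHM-EX Ubox */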
#define NHMEX_U_MSR_PMON_GLOBAL_CTL 0xc00
#define NHMEX_U_MSR_PMON_CTR 0xc11
#define NHMEX_U_MSR_PMON_EV_SEL 0xc10

#define NHMEX_U_PMON_GLOBAL_EN (1 << 0)
#define NHMEX_U_PMON_GLOBAL_PMI_CORE_SEL 0x0000001e
#define NHMEX_U_PMON_GLOBAL_EN_ALL (1 << 28)
#define NHMEX_U_PMON_GLOBAL_RST_ALL (1 << 29)
#define NHMEX_U_PMON_GLOBAL_FRZ_ALL (1 << 31)

#define NHMEX_U_PMON_RAW_EVENT_MASK \
                (NHMEX_PMON_CTL_EV_SEL_MASK | \
                 NHMEX_PMON_CTL_EDGE_DET)
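
/* NHM-EX Cbox */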
#define NHMEX_C0_MSR_PMON_GLOBAL_CTL 0xd00
#define NHMEX_C0_MSR_PMON_CTR0 0xd11
#define NHMEX_C0_MSR_PMON_EV_SEL0 0xd10
#define NHMEX_C_MSR_OFFSET 0x20
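
/* NHM-EX Bbox */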
#define NHMEX_B0_MSR_PMON_GLOBAL_CTL 0xc20
#define NHMEX_B0_MSR_PMON_CTR0 0xc31
#define NHMEX_B0_MSR_PMON_CTL0 0xc30
#define NHMEX_B_MSR_OFFSET 0x40
#define NHMEX_B0_MSR_MATCH 0xe45
#define NHMEX_B0_MSR_MASK 0xe46
#define NHMEX_B1_MSR_MATCH 0xe4d
#define NHMEX_B1_MSR_MASK 0xe4e

#define NHMEX_B_PMON_CTL_EN (1 << 0)
#define NHMEX_B_PMON_CTL_EV_SEL_SHIFT 1
#define NHMEX_B_PMON_CTL_EV_SEL_MASK \
                (0x1f << NHMEX_B_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_B_PMON_CTR_SHIFT 6
#define NHMEX_B_PMON_CTR_MASK \
                (0x3 << NHMEX_B_PMON_CTR_SHIFT)
#define NHMEX_B_PMON_RAW_EVENT_MASK \
                (NHMEX_B_PMON_CTL_EV_SEL_MASK | \
                 NHMEX_B_PMON_CTR_MASK)
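
/* NHM-EX Sbox */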
#define NHMEX_S0_MSR_PMON_GLOBAL_CTL 0xc40
#define NHMEX_S0_MSR_PMON_CTR0 0xc51
#define NHMEX_S0_MSR_PMON_CTL0 0xc50
#define NHMEX_S_MSR_OFFSET 0x80
#define NHMEX_S0_MSR_MM_CFG 0xe48
#define NHMEX_S0_MSR_MATCH 0xe49
#define NHMEX_S0_MSR_MASK 0xe4a
#define NHMEX_S1_MSR_MM_CFG 0xe58
#define NHMEX_S1_MSR_MATCH 0xe59
#define NHMEX_S1_MSR_MASK 0xe5a

#define NHMEX_S_PMON_MM_CFG_EN (0x1ULL << 63)
#define NHMEX_S_EVENT_TO_R_PROG_EV 0
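
/* NHM-EX Mbox */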
#define NHMEX_M0_MSR_GLOBAL_CTL 0xca0
#define NHMEX_M0_MSR_PMU_DSP 0xca5
#define NHMEX_M0_MSR_PMU_ISS 0xca6
#define NHMEX_M0_MSR_PMU_MAP 0xca7
#define NHMEX_M0_MSR_PMU_MSC_THR 0xca8
#define NHMEX_M0_MSR_PMU_PGT 0xca9
#define NHMEX_M0_MSR_PMU_PLD 0xcaa
#define NHMEX_M0_MSR_PMU_ZDP_CTL_FVC 0xcab
#define NHMEX_M0_MSR_PMU_CTL0 0xcb0
#define NHMEX_M0_MSR_PMU_CNT0 0xcb1
#define NHMEX_M_MSR_OFFSET 0x40
#define NHMEX_M0_MSR_PMU_MM_CFG 0xe54
#define NHMEX_M1_MSR_PMU_MM_CFG 0xe5c

#define NHMEX_M_PMON_MM_CFG_EN (1ULL << 63)
#define NHMEX_M_PMON_ADDR_MATCH_MASK 0x3ffffffffULL
#define NHMEX_M_PMON_ADDR_MASK_MASK 0x7ffffffULL
#define NHMEX_M_PMON_ADDR_MASK_SHIFT 34

#define NHMEX_M_PMON_CTL_EN (1 << 0)
#define NHMEX_M_PMON_CTL_PMI_EN (1 << 1)
#define NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT 2
#define NHMEX_M_PMON_CTL_COUNT_MODE_MASK \
        (0x3 << NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT 4
#define NHMEX_M_PMON_CTL_STORAGE_MODE_MASK \
        (0x3 << NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_WRAP_MODE (1 << 6)
#define NHMEX_M_PMON_CTL_FLAG_MODE (1 << 7)
#define NHMEX_M_PMON_CTL_INC_SEL_SHIFT 9
#define NHMEX_M_PMON_CTL_INC_SEL_MASK \
        (0x1f << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT 19
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK \
        (0x7 << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT)
#define NHMEX_M_PMON_RAW_EVENT_MASK \
        (NHMEX_M_PMON_CTL_COUNT_MODE_MASK | \
         NHMEX_M_PMON_CTL_STORAGE_MODE_MASK | \
         NHMEX_M_PMON_CTL_WRAP_MODE | \
         NHMEX_M_PMON_CTL_FLAG_MODE | \
         NHMEX_M_PMON_CTL_INC_SEL_MASK | \
         NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK)

#define NHMEX_M_PMON_ZDP_CTL_FVC_MASK (((1 << 11) - 1) | (1 << 23))
#define NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n) (0x7 << (11 + 3 * (n)))

#define WSMEX_M_PMON_ZDP_CTL_FVC_MASK (((1 << 12) - 1) | (1 << 24))
#define WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n) (0x7 << (12 + 3 * (n)))

/*
 * If the 7th bit (flag mode) is not set, bits 9~13 select the event;
 * otherwise bits 19~21 select the event.
 */
#define MBOX_INC_SEL(x) ((x) << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define MBOX_SET_FLAG_SEL(x) (((x) << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT) | \
                                NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_MASK (NHMEX_M_PMON_CTL_INC_SEL_MASK | \
                           NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_SET_FLAG_SEL_MASK (NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK | \
                                NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_EXTAR_REG(c, r) \
                EVENT_EXTRA_REG(MBOX_INC_SEL(c), NHMEX_M0_MSR_PMU_##r, \
                                MBOX_INC_SEL_MASK, (u64)-1, NHMEX_M_##r)
#define MBOX_SET_FLAG_SEL_EXTRA_REG(c, r) \
                EVENT_EXTRA_REG(MBOX_SET_FLAG_SEL(c), NHMEX_M0_MSR_PMU_##r, \
                                MBOX_SET_FLAG_SEL_MASK, \
                                (u64)-1, NHMEX_M_##r)
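
/*
 * Example: MBOX_INC_SEL(0x4) places the event code in bits 9~13 with
 * flag mode clear, while MBOX_SET_FLAG_SEL(0x4) places it in bits 19~21
 * and also sets the flag-mode bit (bit 7), matching the selection rule
 * described above.
 */

/* NHM-EX Rbox */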
#define NHMEX_R_MSR_GLOBAL_CTL 0xe00
#define NHMEX_R_MSR_PMON_CTL0 0xe10
#define NHMEX_R_MSR_PMON_CNT0 0xe11
#define NHMEX_R_MSR_OFFSET 0x20

#define NHMEX_R_MSR_PORTN_QLX_CFG(n) \
                ((n) < 4 ? (0xe0c + (n)) : (0xe2c + (n) - 4))
#define NHMEX_R_MSR_PORTN_IPERF_CFG0(n) (0xe04 + (n))
#define NHMEX_R_MSR_PORTN_IPERF_CFG1(n) (0xe24 + (n))
#define NHMEX_R_MSR_PORTN_XBR_OFFSET(n) \
                (((n) < 4 ? 0 : 0x10) + (n) * 4)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) \
                (0xe60 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET1_MATCH(n) \
                (NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MASK(n) \
                (NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 2)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) \
                (0xe70 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET2_MATCH(n) \
                (NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MASK(n) \
                (NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 2)

#define NHMEX_R_PMON_CTL_EN (1 << 0)
#define NHMEX_R_PMON_CTL_EV_SEL_SHIFT 1
#define NHMEX_R_PMON_CTL_EV_SEL_MASK \
                (0x1f << NHMEX_R_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_R_PMON_CTL_PMI_EN (1 << 6)
#define NHMEX_R_PMON_RAW_EVENT_MASK NHMEX_R_PMON_CTL_EV_SEL_MASK
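
/* NHM-EX Wbox */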
#define NHMEX_W_MSR_GLOBAL_CTL 0xc80
#define NHMEX_W_MSR_PMON_CNT0 0xc90
#define NHMEX_W_MSR_PMON_EVT_SEL0 0xc91
#define NHMEX_W_MSR_PMON_FIXED_CTR 0x394
#define NHMEX_W_MSR_PMON_FIXED_CTL 0x395

#define NHMEX_W_PMON_GLOBAL_FIXED_EN (1ULL << 31)

struct intel_uncore_ops;
struct intel_uncore_pmu;
struct intel_uncore_box;
struct uncore_event_desc;

struct intel_uncore_type {
        const char *name;
        int num_counters;
        int num_boxes;
        int perf_ctr_bits;
        int fixed_ctr_bits;
        unsigned perf_ctr;
        unsigned event_ctl;
        unsigned event_mask;
        unsigned fixed_ctr;
        unsigned fixed_ctl;
        unsigned box_ctl;
        unsigned msr_offset;
        unsigned num_shared_regs:8;
        unsigned single_fixed:1;
        unsigned pair_ctr_ctl:1;
        unsigned *msr_offsets;
        struct event_constraint unconstrainted;
        struct event_constraint *constraints;
        struct intel_uncore_pmu *pmus;
        struct intel_uncore_ops *ops;
        struct uncore_event_desc *event_descs;
        const struct attribute_group *attr_groups[4];
};

#define pmu_group attr_groups[0]
#define format_group attr_groups[1]
#define events_group attr_groups[2]

struct intel_uncore_ops {
        void (*init_box)(struct intel_uncore_box *);
        void (*disable_box)(struct intel_uncore_box *);
        void (*enable_box)(struct intel_uncore_box *);
        void (*disable_event)(struct intel_uncore_box *, struct perf_event *);
        void (*enable_event)(struct intel_uncore_box *, struct perf_event *);
        u64 (*read_counter)(struct intel_uncore_box *, struct perf_event *);
        int (*hw_config)(struct intel_uncore_box *, struct perf_event *);
        struct event_constraint *(*get_constraint)(struct intel_uncore_box *,
                                                   struct perf_event *);
        void (*put_constraint)(struct intel_uncore_box *, struct perf_event *);
};

struct intel_uncore_pmu {
        struct pmu pmu;
        char name[UNCORE_PMU_NAME_LEN];
        int pmu_idx;
        int func_id;
        struct intel_uncore_type *type;
        struct intel_uncore_box ** __percpu box;
        struct list_head box_list;
};

struct intel_uncore_extra_reg {
        raw_spinlock_t lock;
        u64 config, config1, config2;
        atomic_t ref;
};

struct intel_uncore_box {
        int phys_id;
        int n_active;   /* number of active events */
        int n_events;
        int cpu;        /* cpu to collect events */
        unsigned long flags;
        atomic_t refcnt;
        struct perf_event *events[UNCORE_PMC_IDX_MAX];
        struct perf_event *event_list[UNCORE_PMC_IDX_MAX];
        unsigned long active_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)];
        u64 tags[UNCORE_PMC_IDX_MAX];
        struct pci_dev *pci_dev;
        struct intel_uncore_pmu *pmu;
        struct hrtimer hrtimer;
        struct list_head list;
        struct intel_uncore_extra_reg shared_regs[0];
};

#define UNCORE_BOX_FLAG_INITIATED 0

struct uncore_event_desc {
        struct kobj_attribute attr;
        const char *config;
};

#define INTEL_UNCORE_EVENT_DESC(_name, _config) \
{ \
        .attr = __ATTR(_name, 0444, uncore_event_show, NULL), \
        .config = _config, \
}

#define DEFINE_UNCORE_FORMAT_ATTR(_var, _name, _format) \
static ssize_t __uncore_##_var##_show(struct kobject *kobj, \
                                struct kobj_attribute *attr, \
                                char *page) \
{ \
        BUILD_BUG_ON(sizeof(_format) >= PAGE_SIZE); \
        return sprintf(page, _format "\n"); \
} \
static struct kobj_attribute format_attr_##_var = \
        __ATTR(_name, 0444, __uncore_##_var##_show, NULL)

static ssize_t uncore_event_show(struct kobject *kobj,
                                struct kobj_attribute *attr, char *buf)
{
        struct uncore_event_desc *event =
                container_of(attr, struct uncore_event_desc, attr);
        return sprintf(buf, "%s", event->config);
}

static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box)
{
        return box->pmu->type->box_ctl;
}

static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box)
{
        return box->pmu->type->fixed_ctl;
}

static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box)
{
        return box->pmu->type->fixed_ctr;
}

static inline
unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx)
{
        return idx * 4 + box->pmu->type->event_ctl;
}

static inline
unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx)
{
        return idx * 8 + box->pmu->type->perf_ctr;
}

static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box)
{
        struct intel_uncore_pmu *pmu = box->pmu;
        return pmu->type->msr_offsets ?
                pmu->type->msr_offsets[pmu->pmu_idx] :
                pmu->type->msr_offset * pmu->pmu_idx;
}

static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box)
{
        if (!box->pmu->type->box_ctl)
                return 0;
        return box->pmu->type->box_ctl + uncore_msr_box_offset(box);
}

static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box)
{
        if (!box->pmu->type->fixed_ctl)
                return 0;
        return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box);
}

static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box)
{
        return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box);
}

static inline
unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx)
{
        return box->pmu->type->event_ctl +
                (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
                uncore_msr_box_offset(box);
}

static inline
unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx)
{
        return box->pmu->type->perf_ctr +
                (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
                uncore_msr_box_offset(box);
}
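
/*
 * Illustrative example, assuming the companion .c file sets up the
 * SNB-EP Cbox type with event_ctl = SNBEP_C0_MSR_PMON_CTL0, msr_offset =
 * SNBEP_CBO_MSR_OFFSET and pair_ctr_ctl = 0: for C-box 2 (pmu_idx = 2),
 * counter 1, uncore_msr_event_ctl() yields 0xd10 + 1 + 2 * 0x20 = 0xd51.
 */
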
static inline
unsigned uncore_fixed_ctl(struct intel_uncore_box *box)
{
        if (box->pci_dev)
                return uncore_pci_fixed_ctl(box);
        else
                return uncore_msr_fixed_ctl(box);
}

static inline
unsigned uncore_fixed_ctr(struct intel_uncore_box *box)
{
        if (box->pci_dev)
                return uncore_pci_fixed_ctr(box);
        else
                return uncore_msr_fixed_ctr(box);
}

static inline
unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx)
{
        if (box->pci_dev)
                return uncore_pci_event_ctl(box, idx);
        else
                return uncore_msr_event_ctl(box, idx);
}

static inline
unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx)
{
        if (box->pci_dev)
                return uncore_pci_perf_ctr(box, idx);
        else
                return uncore_msr_perf_ctr(box, idx);
}

static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box)
{
        return box->pmu->type->perf_ctr_bits;
}

static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box)
{
        return box->pmu->type->fixed_ctr_bits;
}

static inline int uncore_num_counters(struct intel_uncore_box *box)
{
        return box->pmu->type->num_counters;
}

static inline void uncore_disable_box(struct intel_uncore_box *box)
{
        if (box->pmu->type->ops->disable_box)
                box->pmu->type->ops->disable_box(box);
}

static inline void uncore_enable_box(struct intel_uncore_box *box)
{
        if (box->pmu->type->ops->enable_box)
                box->pmu->type->ops->enable_box(box);
}

static inline void uncore_disable_event(struct intel_uncore_box *box,
                                struct perf_event *event)
{
        box->pmu->type->ops->disable_event(box, event);
}

static inline void uncore_enable_event(struct intel_uncore_box *box,
                                struct perf_event *event)
{
        box->pmu->type->ops->enable_event(box, event);
}

static inline u64 uncore_read_counter(struct intel_uncore_box *box,
                                struct perf_event *event)
{
        return box->pmu->type->ops->read_counter(box, event);
}

static inline void uncore_box_init(struct intel_uncore_box *box)
{
        if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) {
                if (box->pmu->type->ops->init_box)
                        box->pmu->type->ops->init_box(box);
        }
}

static inline bool uncore_box_is_fake(struct intel_uncore_box *box)
{
        return (box->phys_id < 0);
}