Merge tag 'pci-v3.15-changes' of git://git.kernel.org/pub/scm/linux/kernel/git/helgaa...
[deliverable/linux.git] / arch / x86 / kernel / cpu / perf_event_intel_uncore.h
1 #include <linux/module.h>
2 #include <linux/slab.h>
3 #include <linux/pci.h>
4 #include <linux/perf_event.h>
5 #include "perf_event.h"
6
#define UNCORE_PMU_NAME_LEN		32
#define UNCORE_PMU_HRTIMER_INTERVAL	(60LL * NSEC_PER_SEC)
#define UNCORE_SNB_IMC_HRTIMER_INTERVAL (5ULL * NSEC_PER_SEC)

#define UNCORE_FIXED_EVENT		0xff
#define UNCORE_PMC_IDX_MAX_GENERIC	8
#define UNCORE_PMC_IDX_FIXED		UNCORE_PMC_IDX_MAX_GENERIC
#define UNCORE_PMC_IDX_MAX		(UNCORE_PMC_IDX_FIXED + 1)

/*
 * Pack an uncore type id (bits 15:8) and a box index (bits 7:0) into a
 * single pci driver_data word, and unpack them again.  All arguments are
 * fully parenthesized so that expression arguments (e.g. "a | b") expand
 * correctly.
 */
#define UNCORE_PCI_DEV_DATA(type, idx)	(((type) << 8) | (idx))
#define UNCORE_PCI_DEV_TYPE(data)	(((data) >> 8) & 0xff)
#define UNCORE_PCI_DEV_IDX(data)	((data) & 0xff)
/* sentinel type id marking an auxiliary device, stored in extra_pci_dev */
#define UNCORE_EXTRA_PCI_DEV		0xff
#define UNCORE_EXTRA_PCI_DEV_MAX	2

/* support up to 8 sockets */
#define UNCORE_SOCKET_MAX		8

#define UNCORE_EVENT_CONSTRAINT(c, n)	EVENT_CONSTRAINT(c, n, 0xff)
26
/* SNB event control */
#define SNB_UNC_CTL_EV_SEL_MASK			0x000000ff
#define SNB_UNC_CTL_UMASK_MASK			0x0000ff00
#define SNB_UNC_CTL_EDGE_DET			(1 << 18)
#define SNB_UNC_CTL_EN				(1 << 22)
#define SNB_UNC_CTL_INVERT			(1 << 23)
#define SNB_UNC_CTL_CMASK_MASK			0x1f000000
/* NHM uses a wider (8-bit) cmask field than SNB's 5-bit one */
#define NHM_UNC_CTL_CMASK_MASK			0xff000000
#define NHM_UNC_FIXED_CTR_CTL_EN		(1 << 0)

/* set of config bits a raw event may legitimately carry on SNB */
#define SNB_UNC_RAW_EVENT_MASK			(SNB_UNC_CTL_EV_SEL_MASK | \
						 SNB_UNC_CTL_UMASK_MASK | \
						 SNB_UNC_CTL_EDGE_DET | \
						 SNB_UNC_CTL_INVERT | \
						 SNB_UNC_CTL_CMASK_MASK)

/* same as above but with the NHM cmask field */
#define NHM_UNC_RAW_EVENT_MASK			(SNB_UNC_CTL_EV_SEL_MASK | \
						 SNB_UNC_CTL_UMASK_MASK | \
						 SNB_UNC_CTL_EDGE_DET | \
						 SNB_UNC_CTL_INVERT | \
						 NHM_UNC_CTL_CMASK_MASK)

/* SNB global control register */
#define SNB_UNC_PERF_GLOBAL_CTL                 0x391
#define SNB_UNC_FIXED_CTR_CTRL                  0x394
#define SNB_UNC_FIXED_CTR                       0x395

/* SNB uncore global control */
#define SNB_UNC_GLOBAL_CTL_CORE_ALL             ((1 << 4) - 1)
#define SNB_UNC_GLOBAL_CTL_EN                   (1 << 29)

/* SNB Cbo register */
#define SNB_UNC_CBO_0_PERFEVTSEL0               0x700
#define SNB_UNC_CBO_0_PER_CTR0                  0x706
#define SNB_UNC_CBO_MSR_OFFSET                  0x10

/* NHM global control register */
#define NHM_UNC_PERF_GLOBAL_CTL                 0x391
/* NOTE(review): on NHM the fixed counter (0x394) and its control (0x395)
 * are swapped relative to SNB above — intentional, per-generation layout. */
#define NHM_UNC_FIXED_CTR                       0x394
#define NHM_UNC_FIXED_CTR_CTRL                  0x395

/* NHM uncore global control */
#define NHM_UNC_GLOBAL_CTL_EN_PC_ALL            ((1ULL << 8) - 1)
#define NHM_UNC_GLOBAL_CTL_EN_FC                (1ULL << 32)

/* NHM uncore register */
#define NHM_UNC_PERFEVTSEL0                     0x3c0
#define NHM_UNC_UNCORE_PMC0                     0x3b0
75
/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL	(1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS	(1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ		(1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN	(1 << 16)
/* initial box-control value: reset control+counters, enable freeze */
#define SNBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS | \
					 SNBEP_PMON_BOX_CTL_FRZ_EN)
/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK	0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK	0x0000ff00
#define SNBEP_PMON_CTL_RST		(1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET		(1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT	(1 << 21)
#define SNBEP_PMON_CTL_EN		(1 << 22)
#define SNBEP_PMON_CTL_INVERT		(1 << 23)
/* "TRESH" (sic) — historical spelling kept for consistency across the file */
#define SNBEP_PMON_CTL_TRESH_MASK	0xff000000
#define SNBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_INVERT | \
					 SNBEP_PMON_CTL_TRESH_MASK)

/* SNB-EP Ubox event control */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK		0x1f000000
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK		\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

#define SNBEP_CBO_PMON_CTL_TID_EN		(1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK	(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* SNB-EP PCU event control */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK	0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK	0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT	(1 << 30)
/* NOTE(review): 1 << 31 sets the sign bit of a signed int; consider 1U << 31
 * to avoid sign-extension surprises if promoted to u64 — verify call sites. */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET	(1 << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_EV_SEL_EXT | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)

/* SNB-EP pci control register */
#define SNBEP_PCI_PMON_BOX_CTL			0xf4
#define SNBEP_PCI_PMON_CTL0			0xd8
/* SNB-EP pci counter register */
#define SNBEP_PCI_PMON_CTR0			0xa0

/* SNB-EP home agent register */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0	0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1	0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH	0x48
/* SNB-EP memory controller register */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL		0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR		0xd0
/* SNB-EP QPI register */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0		0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1		0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0		0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1		0x23c

/* SNB-EP Ubox register */
#define SNBEP_U_MSR_PMON_CTR0			0xc16
#define SNBEP_U_MSR_PMON_CTL0			0xc10

#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL		0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR		0xc09

/* SNB-EP Cbo register */
#define SNBEP_C0_MSR_PMON_CTR0			0xd16
#define SNBEP_C0_MSR_PMON_CTL0			0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL		0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER		0xd14
#define SNBEP_CBO_MSR_OFFSET			0x20

/* bit fields of the Cbo filter register */
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID	0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID	0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE	0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC	0xff800000

/* extra-reg descriptor targeting the Cbo filter MSR */
#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) {	\
	.event = (e),				\
	.msr = SNBEP_C0_MSR_PMON_BOX_FILTER,	\
	.config_mask = (m),			\
	.idx = (i)				\
}

/* SNB-EP PCU register */
#define SNBEP_PCU_MSR_PMON_CTR0			0xc36
#define SNBEP_PCU_MSR_PMON_CTL0			0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL		0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER		0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK	0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR		0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR		0x3fd
184
/* IVT event control */
/* IVT init does not set FRZ_EN, unlike SNB-EP's SNBEP_PMON_BOX_CTL_INT */
#define IVT_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS)
/* IVT drops the INVERT bit from the generic raw-event mask */
#define IVT_PMON_RAW_EVENT_MASK		(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_TRESH_MASK)
/* IVT Ubox */
#define IVT_U_MSR_PMON_GLOBAL_CTL		0xc00
/* NOTE(review): 1 << 31 sets the int sign bit; verify no u64 sign-extension */
#define IVT_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)
#define IVT_U_PMON_GLOBAL_UNFRZ_ALL		(1 << 29)

#define IVT_U_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)
/* IVT Cbo */
#define IVT_CBO_MSR_PMON_RAW_EVENT_MASK		(IVT_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* IVT Cbo filter fields (64-bit filter register layout) */
#define IVT_CB0_MSR_PMON_BOX_FILTER_TID		(0x1fULL << 0)
#define IVT_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 5)
#define IVT_CB0_MSR_PMON_BOX_FILTER_STATE	(0x3fULL << 17)
#define IVT_CB0_MSR_PMON_BOX_FILTER_NID		(0xffffULL << 32)
#define IVT_CB0_MSR_PMON_BOX_FILTER_OPC		(0x1ffULL << 52)
#define IVT_CB0_MSR_PMON_BOX_FILTER_C6		(0x1ULL << 61)
#define IVT_CB0_MSR_PMON_BOX_FILTER_NC		(0x1ULL << 62)
#define IVT_CB0_MSR_PMON_BOX_FILTER_IOSC	(0x1ULL << 63)

/* IVT home agent */
#define IVT_HA_PCI_PMON_CTL_Q_OCC_RST		(1 << 16)
#define IVT_HA_PCI_PMON_RAW_EVENT_MASK		\
				(IVT_PMON_RAW_EVENT_MASK | \
				 IVT_HA_PCI_PMON_CTL_Q_OCC_RST)
/* IVT PCU */
#define IVT_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
/* IVT QPI */
#define IVT_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(IVT_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)
233
/* NHM-EX event control */
#define NHMEX_PMON_CTL_EV_SEL_MASK	0x000000ff
#define NHMEX_PMON_CTL_UMASK_MASK	0x0000ff00
#define NHMEX_PMON_CTL_EN_BIT0		(1 << 0)
#define NHMEX_PMON_CTL_EDGE_DET		(1 << 18)
#define NHMEX_PMON_CTL_PMI_EN		(1 << 20)
#define NHMEX_PMON_CTL_EN_BIT22		(1 << 22)
#define NHMEX_PMON_CTL_INVERT		(1 << 23)
#define NHMEX_PMON_CTL_TRESH_MASK	0xff000000
#define NHMEX_PMON_RAW_EVENT_MASK	(NHMEX_PMON_CTL_EV_SEL_MASK | \
					 NHMEX_PMON_CTL_UMASK_MASK | \
					 NHMEX_PMON_CTL_EDGE_DET | \
					 NHMEX_PMON_CTL_INVERT | \
					 NHMEX_PMON_CTL_TRESH_MASK)

/* NHM-EX Ubox */
#define NHMEX_U_MSR_PMON_GLOBAL_CTL		0xc00
#define NHMEX_U_MSR_PMON_CTR			0xc11
#define NHMEX_U_MSR_PMON_EV_SEL			0xc10

#define NHMEX_U_PMON_GLOBAL_EN			(1 << 0)
#define NHMEX_U_PMON_GLOBAL_PMI_CORE_SEL	0x0000001e
#define NHMEX_U_PMON_GLOBAL_EN_ALL		(1 << 28)
#define NHMEX_U_PMON_GLOBAL_RST_ALL		(1 << 29)
/* NOTE(review): 1 << 31 sets the int sign bit; verify no u64 sign-extension */
#define NHMEX_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)

#define NHMEX_U_PMON_RAW_EVENT_MASK		\
		(NHMEX_PMON_CTL_EV_SEL_MASK |	\
		 NHMEX_PMON_CTL_EDGE_DET)

/* NHM-EX Cbox */
#define NHMEX_C0_MSR_PMON_GLOBAL_CTL		0xd00
#define NHMEX_C0_MSR_PMON_CTR0			0xd11
#define NHMEX_C0_MSR_PMON_EV_SEL0		0xd10
#define NHMEX_C_MSR_OFFSET			0x20

/* NHM-EX Bbox */
#define NHMEX_B0_MSR_PMON_GLOBAL_CTL		0xc20
#define NHMEX_B0_MSR_PMON_CTR0			0xc31
#define NHMEX_B0_MSR_PMON_CTL0			0xc30
#define NHMEX_B_MSR_OFFSET			0x40
#define NHMEX_B0_MSR_MATCH			0xe45
#define NHMEX_B0_MSR_MASK			0xe46
#define NHMEX_B1_MSR_MATCH			0xe4d
#define NHMEX_B1_MSR_MASK			0xe4e

/* Bbox event-select layout: 5-bit event at bit 1, 2-bit counter at bit 6 */
#define NHMEX_B_PMON_CTL_EN			(1 << 0)
#define NHMEX_B_PMON_CTL_EV_SEL_SHIFT		1
#define NHMEX_B_PMON_CTL_EV_SEL_MASK		\
		(0x1f << NHMEX_B_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_B_PMON_CTR_SHIFT			6
#define NHMEX_B_PMON_CTR_MASK			\
		(0x3 << NHMEX_B_PMON_CTR_SHIFT)
#define NHMEX_B_PMON_RAW_EVENT_MASK		\
		(NHMEX_B_PMON_CTL_EV_SEL_MASK | \
		 NHMEX_B_PMON_CTR_MASK)

/* NHM-EX Sbox */
#define NHMEX_S0_MSR_PMON_GLOBAL_CTL		0xc40
#define NHMEX_S0_MSR_PMON_CTR0			0xc51
#define NHMEX_S0_MSR_PMON_CTL0			0xc50
#define NHMEX_S_MSR_OFFSET			0x80
#define NHMEX_S0_MSR_MM_CFG			0xe48
#define NHMEX_S0_MSR_MATCH			0xe49
#define NHMEX_S0_MSR_MASK			0xe4a
#define NHMEX_S1_MSR_MM_CFG			0xe58
#define NHMEX_S1_MSR_MATCH			0xe59
#define NHMEX_S1_MSR_MASK			0xe5a

#define NHMEX_S_PMON_MM_CFG_EN			(0x1ULL << 63)
#define NHMEX_S_EVENT_TO_R_PROG_EV		0

/* NHM-EX Mbox */
#define NHMEX_M0_MSR_GLOBAL_CTL			0xca0
#define NHMEX_M0_MSR_PMU_DSP			0xca5
#define NHMEX_M0_MSR_PMU_ISS			0xca6
#define NHMEX_M0_MSR_PMU_MAP			0xca7
#define NHMEX_M0_MSR_PMU_MSC_THR		0xca8
#define NHMEX_M0_MSR_PMU_PGT			0xca9
#define NHMEX_M0_MSR_PMU_PLD			0xcaa
#define NHMEX_M0_MSR_PMU_ZDP_CTL_FVC		0xcab
#define NHMEX_M0_MSR_PMU_CTL0			0xcb0
#define NHMEX_M0_MSR_PMU_CNT0			0xcb1
#define NHMEX_M_MSR_OFFSET			0x40
#define NHMEX_M0_MSR_PMU_MM_CFG			0xe54
#define NHMEX_M1_MSR_PMU_MM_CFG			0xe5c

#define NHMEX_M_PMON_MM_CFG_EN			(1ULL << 63)
#define NHMEX_M_PMON_ADDR_MATCH_MASK		0x3ffffffffULL
#define NHMEX_M_PMON_ADDR_MASK_MASK		0x7ffffffULL
#define NHMEX_M_PMON_ADDR_MASK_SHIFT		34

/* Mbox event-select layout */
#define NHMEX_M_PMON_CTL_EN			(1 << 0)
#define NHMEX_M_PMON_CTL_PMI_EN			(1 << 1)
#define NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT	2
#define NHMEX_M_PMON_CTL_COUNT_MODE_MASK	\
	(0x3 << NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT	4
#define NHMEX_M_PMON_CTL_STORAGE_MODE_MASK	\
	(0x3 << NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_WRAP_MODE		(1 << 6)
#define NHMEX_M_PMON_CTL_FLAG_MODE		(1 << 7)
#define NHMEX_M_PMON_CTL_INC_SEL_SHIFT		9
#define NHMEX_M_PMON_CTL_INC_SEL_MASK		\
	(0x1f << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT	19
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK	\
	(0x7 << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT)
#define NHMEX_M_PMON_RAW_EVENT_MASK		\
		(NHMEX_M_PMON_CTL_COUNT_MODE_MASK |	\
		 NHMEX_M_PMON_CTL_STORAGE_MODE_MASK |	\
		 NHMEX_M_PMON_CTL_WRAP_MODE |		\
		 NHMEX_M_PMON_CTL_FLAG_MODE |		\
		 NHMEX_M_PMON_CTL_INC_SEL_MASK |	\
		 NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK)

#define NHMEX_M_PMON_ZDP_CTL_FVC_MASK		(((1 << 11) - 1) | (1 << 23))
#define NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n)	(0x7ULL << (11 + 3 * (n)))

/* Westmere-EX widens the FVC fields by one bit relative to NHM-EX */
#define WSMEX_M_PMON_ZDP_CTL_FVC_MASK		(((1 << 12) - 1) | (1 << 24))
#define WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n)	(0x7ULL << (12 + 3 * (n)))

/*
 * use the 9~13 bits to select event If the 7th bit is not set,
 * otherwise use the 19~21 bits to select event.
 */
#define MBOX_INC_SEL(x) ((x) << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define MBOX_SET_FLAG_SEL(x) (((x) << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT) | \
				NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_MASK (NHMEX_M_PMON_CTL_INC_SEL_MASK | \
			   NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_SET_FLAG_SEL_MASK (NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK | \
				NHMEX_M_PMON_CTL_FLAG_MODE)
/* expand to an EVENT_EXTRA_REG entry keyed by inc_sel / set_flag_sel */
#define MBOX_INC_SEL_EXTAR_REG(c, r)	\
		EVENT_EXTRA_REG(MBOX_INC_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_INC_SEL_MASK, (u64)-1, NHMEX_M_##r)
#define MBOX_SET_FLAG_SEL_EXTRA_REG(c, r) \
		EVENT_EXTRA_REG(MBOX_SET_FLAG_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_SET_FLAG_SEL_MASK, \
				(u64)-1, NHMEX_M_##r)

/* NHM-EX Rbox */
#define NHMEX_R_MSR_GLOBAL_CTL			0xe00
#define NHMEX_R_MSR_PMON_CTL0			0xe10
#define NHMEX_R_MSR_PMON_CNT0			0xe11
#define NHMEX_R_MSR_OFFSET			0x20

/* Rbox per-port config MSRs; ports 0-3 and 4-7 live in separate ranges */
#define NHMEX_R_MSR_PORTN_QLX_CFG(n)		\
		((n) < 4 ? (0xe0c + (n)) : (0xe2c + (n) - 4))
#define NHMEX_R_MSR_PORTN_IPERF_CFG0(n)		(0xe04 + (n))
#define NHMEX_R_MSR_PORTN_IPERF_CFG1(n)		(0xe24 + (n))
#define NHMEX_R_MSR_PORTN_XBR_OFFSET(n)		\
		(((n) < 4 ? 0 : 0x10) + (n) * 4)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n)	\
		(0xe60 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET1_MATCH(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MASK(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 2)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n)	\
		(0xe70 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET2_MATCH(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MASK(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 2)

#define NHMEX_R_PMON_CTL_EN			(1 << 0)
#define NHMEX_R_PMON_CTL_EV_SEL_SHIFT		1
#define NHMEX_R_PMON_CTL_EV_SEL_MASK		\
		(0x1f << NHMEX_R_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_R_PMON_CTL_PMI_EN			(1 << 6)
#define NHMEX_R_PMON_RAW_EVENT_MASK		NHMEX_R_PMON_CTL_EV_SEL_MASK

/* NHM-EX Wbox */
#define NHMEX_W_MSR_GLOBAL_CTL			0xc80
#define NHMEX_W_MSR_PMON_CNT0			0xc90
#define NHMEX_W_MSR_PMON_EVT_SEL0		0xc91
#define NHMEX_W_MSR_PMON_FIXED_CTR		0x394
#define NHMEX_W_MSR_PMON_FIXED_CTL		0x395

#define NHMEX_W_PMON_GLOBAL_FIXED_EN		(1ULL << 31)
415
struct intel_uncore_ops;
struct intel_uncore_pmu;
struct intel_uncore_box;
struct uncore_event_desc;

/*
 * Describes one kind of uncore PMU (e.g. a Cbox or an IMC): register
 * layout, counter geometry and the ops used to drive each box instance.
 */
struct intel_uncore_type {
	const char *name;	/* PMU name exposed to userspace */
	int num_counters;	/* generic counters per box */
	int num_boxes;
	int perf_ctr_bits;	/* width of a generic counter */
	int fixed_ctr_bits;	/* width of the fixed counter, if any */
	unsigned perf_ctr;	/* base of first counter (MSR or PCI offset) */
	unsigned event_ctl;	/* base of first event-select register */
	unsigned event_mask;	/* valid raw-event config bits */
	unsigned fixed_ctr;	/* 0 when the type has no fixed counter */
	unsigned fixed_ctl;
	unsigned box_ctl;	/* 0 when the type has no box control reg */
	unsigned msr_offset;	/* per-box stride; see uncore_msr_box_offset() */
	unsigned num_shared_regs:8;	/* sizes box->shared_regs[] */
	unsigned single_fixed:1;
	unsigned pair_ctr_ctl:1;	/* ctr/ctl interleaved: stride 2 */
	unsigned *msr_offsets;	/* per-box table; overrides msr_offset */
	struct event_constraint unconstrainted;
	struct event_constraint *constraints;
	struct intel_uncore_pmu *pmus;
	struct intel_uncore_ops *ops;
	struct uncore_event_desc *event_descs;
	const struct attribute_group *attr_groups[4];
	struct pmu *pmu; /* for custom pmu ops */
};

/* convenience aliases into attr_groups[] */
#define pmu_group attr_groups[0]
#define format_group attr_groups[1]
#define events_group attr_groups[2]
450
/*
 * Hardware access callbacks for one uncore type.  disable_box/enable_box,
 * get_constraint/put_constraint and init_box are optional (callers check
 * for NULL); the remaining ops are called unconditionally.
 */
struct intel_uncore_ops {
	void (*init_box)(struct intel_uncore_box *);
	void (*disable_box)(struct intel_uncore_box *);
	void (*enable_box)(struct intel_uncore_box *);
	void (*disable_event)(struct intel_uncore_box *, struct perf_event *);
	void (*enable_event)(struct intel_uncore_box *, struct perf_event *);
	u64 (*read_counter)(struct intel_uncore_box *, struct perf_event *);
	int (*hw_config)(struct intel_uncore_box *, struct perf_event *);
	struct event_constraint *(*get_constraint)(struct intel_uncore_box *,
						   struct perf_event *);
	void (*put_constraint)(struct intel_uncore_box *, struct perf_event *);
};
463
/* One registered perf PMU instance of an uncore type. */
struct intel_uncore_pmu {
	struct pmu pmu;				/* embedded perf core pmu */
	char name[UNCORE_PMU_NAME_LEN];
	int pmu_idx;				/* index within type->pmus */
	int func_id;
	struct intel_uncore_type *type;
	struct intel_uncore_box ** __percpu box; /* per-cpu box pointer */
	struct list_head box_list;
};
473
/* Refcounted shadow of a shared (extra) config register within a box. */
struct intel_uncore_extra_reg {
	raw_spinlock_t lock;		/* protects config fields and ref */
	u64 config, config1, config2;
	atomic_t ref;
};
479
480 struct intel_uncore_box {
481 int phys_id;
482 int n_active; /* number of active events */
483 int n_events;
484 int cpu; /* cpu to collect events */
485 unsigned long flags;
486 atomic_t refcnt;
487 struct perf_event *events[UNCORE_PMC_IDX_MAX];
488 struct perf_event *event_list[UNCORE_PMC_IDX_MAX];
489 unsigned long active_mask[BITS_TO_LONGS(UNCORE_PMC_IDX_MAX)];
490 u64 tags[UNCORE_PMC_IDX_MAX];
491 struct pci_dev *pci_dev;
492 struct intel_uncore_pmu *pmu;
493 u64 hrtimer_duration; /* hrtimer timeout for this box */
494 struct hrtimer hrtimer;
495 struct list_head list;
496 struct list_head active_list;
497 void *io_addr;
498 struct intel_uncore_extra_reg shared_regs[0];
499 };
500
501 #define UNCORE_BOX_FLAG_INITIATED 0
502
/* A named event exposed via sysfs; config holds the event string shown. */
struct uncore_event_desc {
	struct kobj_attribute attr;
	const char *config;
};

/* build a read-only sysfs event attribute rendered by uncore_event_show() */
#define INTEL_UNCORE_EVENT_DESC(_name, _config)			\
{								\
	.attr	= __ATTR(_name, 0444, uncore_event_show, NULL),	\
	.config	= _config,					\
}
513
/*
 * Define a sysfs "format" attribute that prints a fixed format string,
 * e.g. "config:0-7".  The BUILD_BUG_ON guards against a string that could
 * overflow the sysfs page buffer.
 */
#define DEFINE_UNCORE_FORMAT_ATTR(_var, _name, _format)			\
static ssize_t __uncore_##_var##_show(struct kobject *kobj,		\
				struct kobj_attribute *attr,		\
				char *page)				\
{									\
	BUILD_BUG_ON(sizeof(_format) >= PAGE_SIZE);			\
	return sprintf(page, _format "\n");				\
}									\
static struct kobj_attribute format_attr_##_var =			\
	__ATTR(_name, 0444, __uncore_##_var##_show, NULL)
525
526 static ssize_t uncore_event_show(struct kobject *kobj,
527 struct kobj_attribute *attr, char *buf)
528 {
529 struct uncore_event_desc *event =
530 container_of(attr, struct uncore_event_desc, attr);
531 return sprintf(buf, "%s", event->config);
532 }
533
534 static inline unsigned uncore_pci_box_ctl(struct intel_uncore_box *box)
535 {
536 return box->pmu->type->box_ctl;
537 }
538
539 static inline unsigned uncore_pci_fixed_ctl(struct intel_uncore_box *box)
540 {
541 return box->pmu->type->fixed_ctl;
542 }
543
544 static inline unsigned uncore_pci_fixed_ctr(struct intel_uncore_box *box)
545 {
546 return box->pmu->type->fixed_ctr;
547 }
548
549 static inline
550 unsigned uncore_pci_event_ctl(struct intel_uncore_box *box, int idx)
551 {
552 return idx * 4 + box->pmu->type->event_ctl;
553 }
554
555 static inline
556 unsigned uncore_pci_perf_ctr(struct intel_uncore_box *box, int idx)
557 {
558 return idx * 8 + box->pmu->type->perf_ctr;
559 }
560
561 static inline unsigned uncore_msr_box_offset(struct intel_uncore_box *box)
562 {
563 struct intel_uncore_pmu *pmu = box->pmu;
564 return pmu->type->msr_offsets ?
565 pmu->type->msr_offsets[pmu->pmu_idx] :
566 pmu->type->msr_offset * pmu->pmu_idx;
567 }
568
569 static inline unsigned uncore_msr_box_ctl(struct intel_uncore_box *box)
570 {
571 if (!box->pmu->type->box_ctl)
572 return 0;
573 return box->pmu->type->box_ctl + uncore_msr_box_offset(box);
574 }
575
576 static inline unsigned uncore_msr_fixed_ctl(struct intel_uncore_box *box)
577 {
578 if (!box->pmu->type->fixed_ctl)
579 return 0;
580 return box->pmu->type->fixed_ctl + uncore_msr_box_offset(box);
581 }
582
583 static inline unsigned uncore_msr_fixed_ctr(struct intel_uncore_box *box)
584 {
585 return box->pmu->type->fixed_ctr + uncore_msr_box_offset(box);
586 }
587
588 static inline
589 unsigned uncore_msr_event_ctl(struct intel_uncore_box *box, int idx)
590 {
591 return box->pmu->type->event_ctl +
592 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
593 uncore_msr_box_offset(box);
594 }
595
596 static inline
597 unsigned uncore_msr_perf_ctr(struct intel_uncore_box *box, int idx)
598 {
599 return box->pmu->type->perf_ctr +
600 (box->pmu->type->pair_ctr_ctl ? 2 * idx : idx) +
601 uncore_msr_box_offset(box);
602 }
603
604 static inline
605 unsigned uncore_fixed_ctl(struct intel_uncore_box *box)
606 {
607 if (box->pci_dev)
608 return uncore_pci_fixed_ctl(box);
609 else
610 return uncore_msr_fixed_ctl(box);
611 }
612
613 static inline
614 unsigned uncore_fixed_ctr(struct intel_uncore_box *box)
615 {
616 if (box->pci_dev)
617 return uncore_pci_fixed_ctr(box);
618 else
619 return uncore_msr_fixed_ctr(box);
620 }
621
622 static inline
623 unsigned uncore_event_ctl(struct intel_uncore_box *box, int idx)
624 {
625 if (box->pci_dev)
626 return uncore_pci_event_ctl(box, idx);
627 else
628 return uncore_msr_event_ctl(box, idx);
629 }
630
631 static inline
632 unsigned uncore_perf_ctr(struct intel_uncore_box *box, int idx)
633 {
634 if (box->pci_dev)
635 return uncore_pci_perf_ctr(box, idx);
636 else
637 return uncore_msr_perf_ctr(box, idx);
638 }
639
640 static inline int uncore_perf_ctr_bits(struct intel_uncore_box *box)
641 {
642 return box->pmu->type->perf_ctr_bits;
643 }
644
645 static inline int uncore_fixed_ctr_bits(struct intel_uncore_box *box)
646 {
647 return box->pmu->type->fixed_ctr_bits;
648 }
649
650 static inline int uncore_num_counters(struct intel_uncore_box *box)
651 {
652 return box->pmu->type->num_counters;
653 }
654
655 static inline void uncore_disable_box(struct intel_uncore_box *box)
656 {
657 if (box->pmu->type->ops->disable_box)
658 box->pmu->type->ops->disable_box(box);
659 }
660
661 static inline void uncore_enable_box(struct intel_uncore_box *box)
662 {
663 if (box->pmu->type->ops->enable_box)
664 box->pmu->type->ops->enable_box(box);
665 }
666
667 static inline void uncore_disable_event(struct intel_uncore_box *box,
668 struct perf_event *event)
669 {
670 box->pmu->type->ops->disable_event(box, event);
671 }
672
673 static inline void uncore_enable_event(struct intel_uncore_box *box,
674 struct perf_event *event)
675 {
676 box->pmu->type->ops->enable_event(box, event);
677 }
678
679 static inline u64 uncore_read_counter(struct intel_uncore_box *box,
680 struct perf_event *event)
681 {
682 return box->pmu->type->ops->read_counter(box, event);
683 }
684
685 static inline void uncore_box_init(struct intel_uncore_box *box)
686 {
687 if (!test_and_set_bit(UNCORE_BOX_FLAG_INITIATED, &box->flags)) {
688 if (box->pmu->type->ops->init_box)
689 box->pmu->type->ops->init_box(box);
690 }
691 }
692
693 static inline bool uncore_box_is_fake(struct intel_uncore_box *box)
694 {
695 return (box->phys_id < 0);
696 }
This page took 0.0444 seconds and 5 git commands to generate.