1
2
3#include "uncore.h"
4
5
/* SNB-EP pci bus to socket mapping */
#define SNBEP_CPUNODEID			0x40
#define SNBEP_GIDNIDMAP			0x54

/* SNB-EP Box level control */
#define SNBEP_PMON_BOX_CTL_RST_CTRL	(1 << 0)
#define SNBEP_PMON_BOX_CTL_RST_CTRS	(1 << 1)
#define SNBEP_PMON_BOX_CTL_FRZ		(1 << 8)
#define SNBEP_PMON_BOX_CTL_FRZ_EN	(1 << 16)
/* Box init value: reset control and counters, enable the freeze feature. */
#define SNBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS | \
					 SNBEP_PMON_BOX_CTL_FRZ_EN)

/* SNB-EP event control */
#define SNBEP_PMON_CTL_EV_SEL_MASK	0x000000ff
#define SNBEP_PMON_CTL_UMASK_MASK	0x0000ff00
#define SNBEP_PMON_CTL_RST		(1 << 17)
#define SNBEP_PMON_CTL_EDGE_DET		(1 << 18)
#define SNBEP_PMON_CTL_EV_SEL_EXT	(1 << 21)	/* only used by the QPI event masks below */
#define SNBEP_PMON_CTL_EN		(1 << 22)
#define SNBEP_PMON_CTL_INVERT		(1 << 23)
#define SNBEP_PMON_CTL_TRESH_MASK	0xff000000
/* All user-programmable bits of a generic SNB-EP event control register. */
#define SNBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_INVERT | \
					 SNBEP_PMON_CTL_TRESH_MASK)

/* SNB-EP Ubox event control: only 5 threshold bits */
#define SNBEP_U_MSR_PMON_CTL_TRESH_MASK	0x1f000000
#define SNBEP_U_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

/* SNB-EP Cbo event control: adds a tid-enable bit */
#define SNBEP_CBO_PMON_CTL_TID_EN		(1 << 19)
#define SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK	(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* SNB-EP PCU event control: occupancy sub-event select/invert/edge bits */
#define SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK	0x0000c000
#define SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK	0x1f000000
#define SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT	(1 << 30)
#define SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET	(1 << 31)
#define SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PMON_CTL_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

/* SNB-EP QPI event control: generic mask plus the extended event select bit */
#define SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)
62
63
/* SNB-EP pci control register offsets */
#define SNBEP_PCI_PMON_BOX_CTL			0xf4
#define SNBEP_PCI_PMON_CTL0			0xd8
/* SNB-EP pci counter register offset */
#define SNBEP_PCI_PMON_CTR0			0xa0

/* SNB-EP home agent (HA) register offsets */
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH0	0x40
#define SNBEP_HA_PCI_PMON_BOX_ADDRMATCH1	0x44
#define SNBEP_HA_PCI_PMON_BOX_OPCODEMATCH	0x48

/* SNB-EP memory controller (per-channel) fixed counter register offsets */
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTL		0xf0
#define SNBEP_MC_CHy_PCI_PMON_FIXED_CTR		0xd0

/* SNB-EP QPI (per-port) packet match/mask register offsets */
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH0		0x228
#define SNBEP_Q_Py_PCI_PMON_PKT_MATCH1		0x22c
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK0		0x238
#define SNBEP_Q_Py_PCI_PMON_PKT_MASK1		0x23c

/* SNB-EP Ubox MSR numbers */
#define SNBEP_U_MSR_PMON_CTR0			0xc16
#define SNBEP_U_MSR_PMON_CTL0			0xc10
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTL		0xc08
#define SNBEP_U_MSR_PMON_UCLK_FIXED_CTR		0xc09

/* SNB-EP Cbo 0 MSR numbers; boxes 1..n are at SNBEP_CBO_MSR_OFFSET strides */
#define SNBEP_C0_MSR_PMON_CTR0			0xd16
#define SNBEP_C0_MSR_PMON_CTL0			0xd10
#define SNBEP_C0_MSR_PMON_BOX_CTL		0xd04
#define SNBEP_C0_MSR_PMON_BOX_FILTER		0xd14
#define SNBEP_CBO_MSR_OFFSET			0x20

/* SNB-EP Cbo box-filter register field masks (tid/nid/state/opcode) */
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_TID	0x1f
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_NID	0x3fc00
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE	0x7c0000
#define SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC	0xff800000

/*
 * Build an extra_reg entry routing a matching raw event to the Cbo
 * box-filter register; @i is a bitmap of the filter fields the event uses.
 */
#define SNBEP_CBO_EVENT_EXTRA_REG(e, m, i) {	\
	.event = (e),				\
	.msr = SNBEP_C0_MSR_PMON_BOX_FILTER,	\
	.config_mask = (m),			\
	.idx = (i)				\
}
107
108
/* SNB-EP PCU MSR numbers */
#define SNBEP_PCU_MSR_PMON_CTR0			0xc36
#define SNBEP_PCU_MSR_PMON_CTL0			0xc30
#define SNBEP_PCU_MSR_PMON_BOX_CTL		0xc24
#define SNBEP_PCU_MSR_PMON_BOX_FILTER		0xc34
#define SNBEP_PCU_MSR_PMON_BOX_FILTER_MASK	0xffffffff
#define SNBEP_PCU_MSR_CORE_C3_CTR		0x3fc
#define SNBEP_PCU_MSR_CORE_C6_CTR		0x3fd

/* IVBEP box init value: reset only — no freeze-enable bit, unlike SNB-EP */
#define IVBEP_PMON_BOX_CTL_INT		(SNBEP_PMON_BOX_CTL_RST_CTRL | \
					 SNBEP_PMON_BOX_CTL_RST_CTRS)
/* IVBEP generic event mask: note no INVERT bit compared to SNB-EP */
#define IVBEP_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_TRESH_MASK)

/* IVBEP Ubox global freeze/unfreeze control */
#define IVBEP_U_MSR_PMON_GLOBAL_CTL		0xc00
#define IVBEP_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)
#define IVBEP_U_PMON_GLOBAL_UNFRZ_ALL		(1 << 29)

/* IVBEP Ubox event control */
#define IVBEP_U_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PMON_CTL_UMASK_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_U_MSR_PMON_CTL_TRESH_MASK)

/* IVBEP Cbo event control */
#define IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK	(IVBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* IVBEP Cbo box-filter register field masks (64-bit filter layout) */
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_TID	(0x1fULL << 0)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 5)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x3fULL << 17)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NID	(0xffffULL << 32)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC	(0x1ffULL << 52)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_C6	(0x1ULL << 61)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_NC	(0x1ULL << 62)
#define IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)

/* IVBEP home agent: queue-occupancy reset bit on top of the generic mask */
#define IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST		(1 << 16)
#define IVBEP_HA_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 IVBEP_HA_PCI_PMON_CTL_Q_OCC_RST)

/* IVBEP PCU event control */
#define IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(SNBEP_PMON_CTL_EV_SEL_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)

/* IVBEP QPI event control */
#define IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK	\
				(IVBEP_PMON_RAW_EVENT_MASK | \
				 SNBEP_PMON_CTL_EV_SEL_EXT)

/* Extract field @i of width @n bits from the packed value @x. */
#define __BITS_VALUE(x, i, n)  ((typeof(x))(((x) >> ((i) * (n))) & \
				((1ULL << (n)) - 1)))
167
168
/* HSW-EP Ubox MSR numbers */
#define HSWEP_U_MSR_PMON_CTR0			0x709
#define HSWEP_U_MSR_PMON_CTL0			0x705
#define HSWEP_U_MSR_PMON_FILTER			0x707

#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTL		0x703
#define HSWEP_U_MSR_PMON_UCLK_FIXED_CTR		0x704

/* HSW-EP Ubox filter fields: thread id and core id */
#define HSWEP_U_MSR_PMON_BOX_FILTER_TID		(0x1 << 0)
#define HSWEP_U_MSR_PMON_BOX_FILTER_CID		(0x1fULL << 1)
#define HSWEP_U_MSR_PMON_BOX_FILTER_MASK \
					(HSWEP_U_MSR_PMON_BOX_FILTER_TID | \
					 HSWEP_U_MSR_PMON_BOX_FILTER_CID)

/* HSW-EP Cbo 0 MSR numbers; boxes 1..n are at HSWEP_CBO_MSR_OFFSET strides */
#define HSWEP_C0_MSR_PMON_CTR0			0xe08
#define HSWEP_C0_MSR_PMON_CTL0			0xe01
#define HSWEP_C0_MSR_PMON_BOX_CTL		0xe00
#define HSWEP_C0_MSR_PMON_BOX_FILTER0		0xe05
#define HSWEP_CBO_MSR_OFFSET			0x10

/* HSW-EP Cbo box-filter register field masks */
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_TID	(0x3fULL << 0)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 6)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE	(0x7fULL << 17)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NID	(0xffffULL << 32)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC	(0x1ffULL << 52)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_C6	(0x1ULL << 61)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_NC	(0x1ULL << 62)
#define HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)


/* HSW-EP Sbo 0 MSR numbers; boxes 1..n are at HSWEP_SBOX_MSR_OFFSET strides */
#define HSWEP_S0_MSR_PMON_CTR0			0x726
#define HSWEP_S0_MSR_PMON_CTL0			0x721
#define HSWEP_S0_MSR_PMON_BOX_CTL		0x720
#define HSWEP_SBOX_MSR_OFFSET			0xa
#define HSWEP_S_MSR_PMON_RAW_EVENT_MASK		(SNBEP_PMON_RAW_EVENT_MASK | \
						 SNBEP_CBO_PMON_CTL_TID_EN)

/* HSW-EP PCU MSR numbers */
#define HSWEP_PCU_MSR_PMON_CTR0			0x717
#define HSWEP_PCU_MSR_PMON_CTL0			0x711
#define HSWEP_PCU_MSR_PMON_BOX_CTL		0x710
#define HSWEP_PCU_MSR_PMON_BOX_FILTER		0x715

/* KNL Ubox event control */
#define KNL_U_MSR_PMON_RAW_EVENT_MASK \
					(SNBEP_U_MSR_PMON_RAW_EVENT_MASK | \
						SNBEP_CBO_PMON_CTL_TID_EN)

/* KNL CHA box offsets, event control and filter field masks */
#define KNL_CHA_MSR_OFFSET			0xc
#define KNL_CHA_MSR_PMON_CTL_QOR		(1 << 16)
#define KNL_CHA_MSR_PMON_RAW_EVENT_MASK \
					(SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK | \
					 KNL_CHA_MSR_PMON_CTL_QOR)
#define KNL_CHA_MSR_PMON_BOX_FILTER_TID		0x1ff
#define KNL_CHA_MSR_PMON_BOX_FILTER_STATE	(7 << 18)
#define KNL_CHA_MSR_PMON_BOX_FILTER_OP		(0xfffffe2aULL << 32)
#define KNL_CHA_MSR_PMON_BOX_FILTER_REMOTE_NODE	(0x1ULL << 32)
#define KNL_CHA_MSR_PMON_BOX_FILTER_LOCAL_NODE	(0x1ULL << 33)
#define KNL_CHA_MSR_PMON_BOX_FILTER_NNC		(0x1ULL << 37)
230
231
/* KNL UCLK (uncore clock) box MSR numbers */
#define KNL_UCLK_MSR_PMON_CTR0_LOW		0x400
#define KNL_UCLK_MSR_PMON_CTL0			0x420
#define KNL_UCLK_MSR_PMON_BOX_CTL		0x430
#define KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW	0x44c
#define KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL	0x454
#define KNL_PMON_FIXED_CTL_EN			0x1

/* KNL EDC (embedded DRAM controller) box 0 ECLK MSR numbers */
#define KNL_EDC0_ECLK_MSR_PMON_CTR0_LOW		0xa00
#define KNL_EDC0_ECLK_MSR_PMON_CTL0		0xa20
#define KNL_EDC0_ECLK_MSR_PMON_BOX_CTL		0xa30
#define KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_LOW	0xa3c
#define KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_CTL	0xa44

/* KNL MC (memory controller) box 0 channel 0 MSR numbers */
#define KNL_MC0_CH0_MSR_PMON_CTR0_LOW		0xb00
#define KNL_MC0_CH0_MSR_PMON_CTL0		0xb20
#define KNL_MC0_CH0_MSR_PMON_BOX_CTL		0xb30
#define KNL_MC0_CH0_MSR_PMON_FIXED_LOW		0xb3c
#define KNL_MC0_CH0_MSR_PMON_FIXED_CTL		0xb44

/* KNL IRP: PCI box control and event mask (adds the QOR bit) */
#define KNL_IRP_PCI_PMON_BOX_CTL		0xf0
#define KNL_IRP_PCI_PMON_RAW_EVENT_MASK		(SNBEP_PMON_RAW_EVENT_MASK | \
						 KNL_CHA_MSR_PMON_CTL_QOR)

/* KNL PCU event control: 7-bit event select, occupancy-counter mux, 6-bit threshold */
#define KNL_PCU_PMON_CTL_EV_SEL_MASK		0x0000007f
#define KNL_PCU_PMON_CTL_USE_OCC_CTR		(1 << 7)
#define KNL_PCU_MSR_PMON_CTL_TRESH_MASK		0x3f000000
#define KNL_PCU_MSR_PMON_RAW_EVENT_MASK	\
				(KNL_PCU_PMON_CTL_EV_SEL_MASK | \
				 KNL_PCU_PMON_CTL_USE_OCC_CTR | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_SEL_MASK | \
				 SNBEP_PMON_CTL_EDGE_DET | \
				 SNBEP_CBO_PMON_CTL_TID_EN | \
				 SNBEP_PMON_CTL_INVERT | \
				 KNL_PCU_MSR_PMON_CTL_TRESH_MASK | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_INVERT | \
				 SNBEP_PCU_MSR_PMON_CTL_OCC_EDGE_DET)
271
272
/* SKX pci bus to socket mapping */
#define SKX_CPUNODEID			0xc0
#define SKX_GIDNIDMAP			0xd4

/* SKX CHA box-filter register field masks */
#define SKX_CHA_MSR_PMON_BOX_FILTER_TID		(0x1ffULL << 0)
#define SKX_CHA_MSR_PMON_BOX_FILTER_LINK	(0xfULL << 9)
#define SKX_CHA_MSR_PMON_BOX_FILTER_STATE	(0x3ffULL << 17)
#define SKX_CHA_MSR_PMON_BOX_FILTER_REM		(0x1ULL << 32)
#define SKX_CHA_MSR_PMON_BOX_FILTER_LOC		(0x1ULL << 33)
#define SKX_CHA_MSR_PMON_BOX_FILTER_ALL_OPC	(0x1ULL << 35)
#define SKX_CHA_MSR_PMON_BOX_FILTER_NM		(0x1ULL << 36)
#define SKX_CHA_MSR_PMON_BOX_FILTER_NOT_NM	(0x1ULL << 37)
#define SKX_CHA_MSR_PMON_BOX_FILTER_OPC0	(0x3ffULL << 41)
#define SKX_CHA_MSR_PMON_BOX_FILTER_OPC1	(0x3ffULL << 51)
#define SKX_CHA_MSR_PMON_BOX_FILTER_C6		(0x1ULL << 61)
#define SKX_CHA_MSR_PMON_BOX_FILTER_NC		(0x1ULL << 62)
#define SKX_CHA_MSR_PMON_BOX_FILTER_ISOC	(0x1ULL << 63)

/* SKX IIO box 0 MSR numbers; boxes 1..n are at SKX_IIO_MSR_OFFSET strides */
#define SKX_IIO0_MSR_PMON_CTL0		0xa48
#define SKX_IIO0_MSR_PMON_CTR0		0xa41
#define SKX_IIO0_MSR_PMON_BOX_CTL	0xa40
#define SKX_IIO_MSR_OFFSET		0x20

/*
 * SKX IIO event control: the threshold grows to 12 bits and channel/
 * function-class masks live in the extended config word.
 */
#define SKX_PMON_CTL_TRESH_MASK		(0xff << 24)
#define SKX_PMON_CTL_TRESH_MASK_EXT	(0xf)
#define SKX_PMON_CTL_CH_MASK		(0xff << 4)
#define SKX_PMON_CTL_FC_MASK		(0x7 << 12)
#define SKX_IIO_PMON_RAW_EVENT_MASK	(SNBEP_PMON_CTL_EV_SEL_MASK | \
					 SNBEP_PMON_CTL_UMASK_MASK | \
					 SNBEP_PMON_CTL_EDGE_DET | \
					 SNBEP_PMON_CTL_INVERT | \
					 SKX_PMON_CTL_TRESH_MASK)
#define SKX_IIO_PMON_RAW_EVENT_MASK_EXT	(SKX_PMON_CTL_TRESH_MASK_EXT | \
					 SKX_PMON_CTL_CH_MASK | \
					 SKX_PMON_CTL_FC_MASK)

/* SKX IRP box 0 MSR numbers; boxes 1..n are at SKX_IRP_MSR_OFFSET strides */
#define SKX_IRP0_MSR_PMON_CTL0		0xa5b
#define SKX_IRP0_MSR_PMON_CTR0		0xa59
#define SKX_IRP0_MSR_PMON_BOX_CTL	0xa58
#define SKX_IRP_MSR_OFFSET		0x20

/* SKX UPI PCI register offsets and extended umask bits */
#define SKX_UPI_PCI_PMON_CTL0		0x350
#define SKX_UPI_PCI_PMON_CTR0		0x318
#define SKX_UPI_PCI_PMON_BOX_CTL	0x378
#define SKX_UPI_CTL_UMASK_EXT		0xffefff

/* SKX M2M PCI register offsets */
#define SKX_M2M_PCI_PMON_CTL0		0x228
#define SKX_M2M_PCI_PMON_CTR0		0x200
#define SKX_M2M_PCI_PMON_BOX_CTL	0x258
326
/*
 * Sysfs "format" attributes.  Each entry maps a named event field to a
 * bit range of perf_event_attr::config/config1/config2.  Numeric
 * suffixes (thresh8 vs thresh9, filter_tid2, filter_opc2, ...) select
 * the bit layout used by a particular uncore generation; the sysfs
 * name (second argument) is the same for each variant.
 */
DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
DEFINE_UNCORE_FORMAT_ATTR(event2, event, "config:0-6");
DEFINE_UNCORE_FORMAT_ATTR(event_ext, event, "config:0-7,21");
DEFINE_UNCORE_FORMAT_ATTR(use_occ_ctr, use_occ_ctr, "config:7");
DEFINE_UNCORE_FORMAT_ATTR(umask, umask, "config:8-15");
DEFINE_UNCORE_FORMAT_ATTR(umask_ext, umask, "config:8-15,32-43,45-55");
DEFINE_UNCORE_FORMAT_ATTR(qor, qor, "config:16");
DEFINE_UNCORE_FORMAT_ATTR(edge, edge, "config:18");
DEFINE_UNCORE_FORMAT_ATTR(tid_en, tid_en, "config:19");
DEFINE_UNCORE_FORMAT_ATTR(inv, inv, "config:23");
DEFINE_UNCORE_FORMAT_ATTR(thresh9, thresh, "config:24-35");
DEFINE_UNCORE_FORMAT_ATTR(thresh8, thresh, "config:24-31");
DEFINE_UNCORE_FORMAT_ATTR(thresh6, thresh, "config:24-29");
DEFINE_UNCORE_FORMAT_ATTR(thresh5, thresh, "config:24-28");
DEFINE_UNCORE_FORMAT_ATTR(occ_sel, occ_sel, "config:14-15");
DEFINE_UNCORE_FORMAT_ATTR(occ_invert, occ_invert, "config:30");
DEFINE_UNCORE_FORMAT_ATTR(occ_edge, occ_edge, "config:14-51");
DEFINE_UNCORE_FORMAT_ATTR(occ_edge_det, occ_edge_det, "config:31");
DEFINE_UNCORE_FORMAT_ATTR(ch_mask, ch_mask, "config:36-43");
DEFINE_UNCORE_FORMAT_ATTR(fc_mask, fc_mask, "config:44-46");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid, filter_tid, "config1:0-4");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid2, filter_tid, "config1:0");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid3, filter_tid, "config1:0-5");
DEFINE_UNCORE_FORMAT_ATTR(filter_tid4, filter_tid, "config1:0-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_cid, filter_cid, "config1:5");
DEFINE_UNCORE_FORMAT_ATTR(filter_link, filter_link, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_link2, filter_link, "config1:6-8");
DEFINE_UNCORE_FORMAT_ATTR(filter_link3, filter_link, "config1:12");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid, filter_nid, "config1:10-17");
DEFINE_UNCORE_FORMAT_ATTR(filter_nid2, filter_nid, "config1:32-47");
DEFINE_UNCORE_FORMAT_ATTR(filter_state, filter_state, "config1:18-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state2, filter_state, "config1:17-22");
DEFINE_UNCORE_FORMAT_ATTR(filter_state3, filter_state, "config1:17-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_state4, filter_state, "config1:18-20");
DEFINE_UNCORE_FORMAT_ATTR(filter_state5, filter_state, "config1:17-26");
DEFINE_UNCORE_FORMAT_ATTR(filter_rem, filter_rem, "config1:32");
DEFINE_UNCORE_FORMAT_ATTR(filter_loc, filter_loc, "config1:33");
DEFINE_UNCORE_FORMAT_ATTR(filter_nm, filter_nm, "config1:36");
DEFINE_UNCORE_FORMAT_ATTR(filter_not_nm, filter_not_nm, "config1:37");
DEFINE_UNCORE_FORMAT_ATTR(filter_local, filter_local, "config1:33");
DEFINE_UNCORE_FORMAT_ATTR(filter_all_op, filter_all_op, "config1:35");
DEFINE_UNCORE_FORMAT_ATTR(filter_nnm, filter_nnm, "config1:37");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc, filter_opc, "config1:23-31");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc2, filter_opc, "config1:52-60");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc3, filter_opc, "config1:41-60");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc_0, filter_opc0, "config1:41-50");
DEFINE_UNCORE_FORMAT_ATTR(filter_opc_1, filter_opc1, "config1:51-60");
DEFINE_UNCORE_FORMAT_ATTR(filter_nc, filter_nc, "config1:62");
DEFINE_UNCORE_FORMAT_ATTR(filter_c6, filter_c6, "config1:61");
DEFINE_UNCORE_FORMAT_ATTR(filter_isoc, filter_isoc, "config1:63");
DEFINE_UNCORE_FORMAT_ATTR(filter_band0, filter_band0, "config1:0-7");
DEFINE_UNCORE_FORMAT_ATTR(filter_band1, filter_band1, "config1:8-15");
DEFINE_UNCORE_FORMAT_ATTR(filter_band2, filter_band2, "config1:16-23");
DEFINE_UNCORE_FORMAT_ATTR(filter_band3, filter_band3, "config1:24-31");
DEFINE_UNCORE_FORMAT_ATTR(match_rds, match_rds, "config1:48-51");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid30, match_rnid30, "config1:32-35");
DEFINE_UNCORE_FORMAT_ATTR(match_rnid4, match_rnid4, "config1:31");
DEFINE_UNCORE_FORMAT_ATTR(match_dnid, match_dnid, "config1:13-17");
DEFINE_UNCORE_FORMAT_ATTR(match_mc, match_mc, "config1:9-12");
DEFINE_UNCORE_FORMAT_ATTR(match_opc, match_opc, "config1:5-8");
DEFINE_UNCORE_FORMAT_ATTR(match_vnw, match_vnw, "config1:3-4");
DEFINE_UNCORE_FORMAT_ATTR(match0, match0, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(match1, match1, "config1:32-63");
DEFINE_UNCORE_FORMAT_ATTR(mask_rds, mask_rds, "config2:48-51");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid30, mask_rnid30, "config2:32-35");
DEFINE_UNCORE_FORMAT_ATTR(mask_rnid4, mask_rnid4, "config2:31");
DEFINE_UNCORE_FORMAT_ATTR(mask_dnid, mask_dnid, "config2:13-17");
DEFINE_UNCORE_FORMAT_ATTR(mask_mc, mask_mc, "config2:9-12");
DEFINE_UNCORE_FORMAT_ATTR(mask_opc, mask_opc, "config2:5-8");
DEFINE_UNCORE_FORMAT_ATTR(mask_vnw, mask_vnw, "config2:3-4");
DEFINE_UNCORE_FORMAT_ATTR(mask0, mask0, "config2:0-31");
DEFINE_UNCORE_FORMAT_ATTR(mask1, mask1, "config2:32-63");
399
400static void snbep_uncore_pci_disable_box(struct intel_uncore_box *box)
401{
402 struct pci_dev *pdev = box->pci_dev;
403 int box_ctl = uncore_pci_box_ctl(box);
404 u32 config = 0;
405
406 if (!pci_read_config_dword(pdev, box_ctl, &config)) {
407 config |= SNBEP_PMON_BOX_CTL_FRZ;
408 pci_write_config_dword(pdev, box_ctl, config);
409 }
410}
411
412static void snbep_uncore_pci_enable_box(struct intel_uncore_box *box)
413{
414 struct pci_dev *pdev = box->pci_dev;
415 int box_ctl = uncore_pci_box_ctl(box);
416 u32 config = 0;
417
418 if (!pci_read_config_dword(pdev, box_ctl, &config)) {
419 config &= ~SNBEP_PMON_BOX_CTL_FRZ;
420 pci_write_config_dword(pdev, box_ctl, config);
421 }
422}
423
424static void snbep_uncore_pci_enable_event(struct intel_uncore_box *box, struct perf_event *event)
425{
426 struct pci_dev *pdev = box->pci_dev;
427 struct hw_perf_event *hwc = &event->hw;
428
429 pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
430}
431
432static void snbep_uncore_pci_disable_event(struct intel_uncore_box *box, struct perf_event *event)
433{
434 struct pci_dev *pdev = box->pci_dev;
435 struct hw_perf_event *hwc = &event->hw;
436
437 pci_write_config_dword(pdev, hwc->config_base, hwc->config);
438}
439
/*
 * Read a 64-bit counter as two 32-bit PCI config reads (low dword at
 * event_base, high dword at event_base + 4), assembled in place via
 * pointer casts.  This layout relies on little-endian byte order,
 * which holds on x86 where this driver runs.  Read errors are not
 * checked; a failed read leaves the corresponding half zero.
 */
static u64 snbep_uncore_pci_read_counter(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	u64 count = 0;

	pci_read_config_dword(pdev, hwc->event_base, (u32 *)&count);
	pci_read_config_dword(pdev, hwc->event_base + 4, (u32 *)&count + 1);

	return count;
}
451
452static void snbep_uncore_pci_init_box(struct intel_uncore_box *box)
453{
454 struct pci_dev *pdev = box->pci_dev;
455 int box_ctl = uncore_pci_box_ctl(box);
456
457 pci_write_config_dword(pdev, box_ctl, SNBEP_PMON_BOX_CTL_INT);
458}
459
460static void snbep_uncore_msr_disable_box(struct intel_uncore_box *box)
461{
462 u64 config;
463 unsigned msr;
464
465 msr = uncore_msr_box_ctl(box);
466 if (msr) {
467 rdmsrl(msr, config);
468 config |= SNBEP_PMON_BOX_CTL_FRZ;
469 wrmsrl(msr, config);
470 }
471}
472
473static void snbep_uncore_msr_enable_box(struct intel_uncore_box *box)
474{
475 u64 config;
476 unsigned msr;
477
478 msr = uncore_msr_box_ctl(box);
479 if (msr) {
480 rdmsrl(msr, config);
481 config &= ~SNBEP_PMON_BOX_CTL_FRZ;
482 wrmsrl(msr, config);
483 }
484}
485
/*
 * Start one MSR-based event.  If the event owns an extra (filter)
 * register, program the box's shared filter value first so the filter
 * is in place before the counter is enabled.
 */
static void snbep_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

	/* Shared-register slot 0 holds the merged filter config for the box. */
	if (reg1->idx != EXTRA_REG_NONE)
		wrmsrl(reg1->reg, uncore_shared_reg_config(box, 0));

	wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
496
497static void snbep_uncore_msr_disable_event(struct intel_uncore_box *box,
498 struct perf_event *event)
499{
500 struct hw_perf_event *hwc = &event->hw;
501
502 wrmsrl(hwc->config_base, hwc->config);
503}
504
505static void snbep_uncore_msr_init_box(struct intel_uncore_box *box)
506{
507 unsigned msr = uncore_msr_box_ctl(box);
508
509 if (msr)
510 wrmsrl(msr, SNBEP_PMON_BOX_CTL_INT);
511}
512
/* Generic SNB-EP format attributes (most boxes). */
static struct attribute *snbep_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

/* Ubox: 5-bit threshold variant. */
static struct attribute *snbep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	NULL,
};

/* Cbo: adds tid_en plus the box-filter fields (tid/nid/state/opc). */
static struct attribute *snbep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid.attr,
	&format_attr_filter_nid.attr,
	&format_attr_filter_state.attr,
	&format_attr_filter_opc.attr,
	NULL,
};

/* PCU: occupancy sub-event fields and four frequency-band filters. */
static struct attribute *snbep_uncore_pcu_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_occ_sel.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge.attr,
	&format_attr_filter_band0.attr,
	&format_attr_filter_band1.attr,
	&format_attr_filter_band2.attr,
	&format_attr_filter_band3.attr,
	NULL,
};

/* QPI: extended event select plus packet match/mask fields. */
static struct attribute *snbep_uncore_qpi_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_match_rds.attr,
	&format_attr_match_rnid30.attr,
	&format_attr_match_rnid4.attr,
	&format_attr_match_dnid.attr,
	&format_attr_match_mc.attr,
	&format_attr_match_opc.attr,
	&format_attr_match_vnw.attr,
	&format_attr_match0.attr,
	&format_attr_match1.attr,
	&format_attr_mask_rds.attr,
	&format_attr_mask_rnid30.attr,
	&format_attr_mask_rnid4.attr,
	&format_attr_mask_dnid.attr,
	&format_attr_mask_mc.attr,
	&format_attr_mask_opc.attr,
	&format_attr_mask_vnw.attr,
	&format_attr_mask0.attr,
	&format_attr_mask1.attr,
	NULL,
};
586
/*
 * Named IMC events.  The CAS-count scale 6.103515625e-5 converts a
 * count of 64-byte cache lines to MiB (64 / 2^20).
 */
static struct uncore_event_desc snbep_uncore_imc_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0xff,umask=0x00"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
	{ /* end marker */ },
};

/* Named QPI events; event codes >= 0x100 use the extended select bit. */
static struct uncore_event_desc snbep_uncore_qpi_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,       "event=0x14"),
	INTEL_UNCORE_EVENT_DESC(txl_flits_active, "event=0x00,umask=0x06"),
	INTEL_UNCORE_EVENT_DESC(drs_data,         "event=0x102,umask=0x08"),
	INTEL_UNCORE_EVENT_DESC(ncb_data,         "event=0x103,umask=0x04"),
	{ /* end marker */ },
};
605
/* Sysfs "format" attribute groups, one per box flavour. */
static const struct attribute_group snbep_uncore_format_group = {
	.name = "format",
	.attrs = snbep_uncore_formats_attr,
};

static const struct attribute_group snbep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = snbep_uncore_ubox_formats_attr,
};

static const struct attribute_group snbep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = snbep_uncore_cbox_formats_attr,
};

static const struct attribute_group snbep_uncore_pcu_format_group = {
	.name = "format",
	.attrs = snbep_uncore_pcu_formats_attr,
};

static const struct attribute_group snbep_uncore_qpi_format_group = {
	.name = "format",
	.attrs = snbep_uncore_qpi_formats_attr,
};
630
/*
 * Common MSR ops initializer, split so that callers which need a
 * different init_box can reuse the rest (__ variant omits init_box).
 */
#define __SNBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	.disable_box	= snbep_uncore_msr_disable_box,		\
	.enable_box	= snbep_uncore_msr_enable_box,		\
	.disable_event	= snbep_uncore_msr_disable_event,	\
	.enable_event	= snbep_uncore_msr_enable_event,	\
	.read_counter	= uncore_msr_read_counter

#define SNBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	__SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),			\
	.init_box	= snbep_uncore_msr_init_box		\

static struct intel_uncore_ops snbep_uncore_msr_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
};
645
646#define SNBEP_UNCORE_PCI_OPS_COMMON_INIT() \
647 .init_box = snbep_uncore_pci_init_box, \
648 .disable_box = snbep_uncore_pci_disable_box, \
649 .enable_box = snbep_uncore_pci_enable_box, \
650 .disable_event = snbep_uncore_pci_disable_event, \
651 .read_counter = snbep_uncore_pci_read_counter
652
653static struct intel_uncore_ops snbep_uncore_pci_ops = {
654 SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
655 .enable_event = snbep_uncore_pci_enable_event, \
656};
657
/* Cbo events restricted to specific counters (bitmask = allowed counters). */
static struct event_constraint snbep_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x02, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x04, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x05, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1b, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1c, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1d, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1e, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0xe),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
	EVENT_CONSTRAINT_END
};
687
/* R2PCIe events restricted to specific counters. */
static struct event_constraint snbep_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	EVENT_CONSTRAINT_END
};
701
/* R3QPI events restricted to specific counters. */
static struct event_constraint snbep_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2a, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x30, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
733
/* SNB-EP Ubox: one box, two 44-bit counters plus a 48-bit fixed counter. */
static struct intel_uncore_type snbep_uncore_ubox = {
	.name		= "ubox",
	.num_counters	= 2,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.fixed_ctr_bits	= 48,
	.perf_ctr	= SNBEP_U_MSR_PMON_CTR0,
	.event_ctl	= SNBEP_U_MSR_PMON_CTL0,
	.event_mask	= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops		= &snbep_uncore_msr_ops,
	.format_group	= &snbep_uncore_ubox_format_group,
};
748
/*
 * Raw Cbo event codes that need the box-filter register.  The third
 * argument is a bitmap of filter fields the event uses, matching the
 * field order in snbep_cbox_filter_mask() (see its bit tests).
 */
static struct extra_reg snbep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x6),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0xa),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x2),
	EVENT_EXTRA_END
};
777
778static void snbep_cbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
779{
780 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
781 struct intel_uncore_extra_reg *er = &box->shared_regs[0];
782 int i;
783
784 if (uncore_box_is_fake(box))
785 return;
786
787 for (i = 0; i < 5; i++) {
788 if (reg1->alloc & (0x1 << i))
789 atomic_sub(1 << (i * 6), &er->ref);
790 }
791 reg1->alloc = 0;
792}
793
/*
 * Try to claim the shared Cbo filter register for this event.  For each
 * of the five filter fields the event uses (bits of reg1->idx), the
 * field is granted when it is currently unused (6-bit refcount slice of
 * er->ref is zero) or already holds an identical value.  On success all
 * claimed fields get a reference and NULL is returned (no constraint);
 * on any conflict, references taken so far are rolled back and the
 * empty constraint is returned to fail scheduling.
 */
static struct event_constraint *
__snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event,
			    u64 (*cbox_filter_mask)(int fields))
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct intel_uncore_extra_reg *er = &box->shared_regs[0];
	int i, alloc = 0;
	unsigned long flags;
	u64 mask;

	/* Event does not use the filter register at all. */
	if (reg1->idx == EXTRA_REG_NONE)
		return NULL;

	raw_spin_lock_irqsave(&er->lock, flags);
	for (i = 0; i < 5; i++) {
		if (!(reg1->idx & (0x1 << i)))
			continue;
		/* Real boxes keep fields claimed across retries. */
		if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
			continue;

		mask = cbox_filter_mask(0x1 << i);
		if (!__BITS_VALUE(atomic_read(&er->ref), i, 6) ||
		    !((reg1->config ^ er->config) & mask)) {
			atomic_add(1 << (i * 6), &er->ref);
			er->config &= ~mask;
			er->config |= reg1->config & mask;
			alloc |= (0x1 << i);
		} else {
			break;	/* conflicting value already programmed */
		}
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);
	if (i < 5)
		goto fail;

	/* Remember the claims only for real boxes (not validation fakes). */
	if (!uncore_box_is_fake(box))
		reg1->alloc |= alloc;

	return NULL;
fail:
	/* Roll back every reference taken in this call. */
	for (; i >= 0; i--) {
		if (alloc & (0x1 << i))
			atomic_sub(1 << (i * 6), &er->ref);
	}
	return &uncore_constraint_empty;
}
840
841static u64 snbep_cbox_filter_mask(int fields)
842{
843 u64 mask = 0;
844
845 if (fields & 0x1)
846 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_TID;
847 if (fields & 0x2)
848 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_NID;
849 if (fields & 0x4)
850 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_STATE;
851 if (fields & 0x8)
852 mask |= SNBEP_CB0_MSR_PMON_BOX_FILTER_OPC;
853
854 return mask;
855}
856
/* SNB-EP Cbo wrapper: claim the filter register using the SNB-EP field masks. */
static struct event_constraint *
snbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, snbep_cbox_filter_mask);
}
862
863static int snbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
864{
865 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
866 struct extra_reg *er;
867 int idx = 0;
868
869 for (er = snbep_uncore_cbox_extra_regs; er->msr; er++) {
870 if (er->event != (event->hw.config & er->config_mask))
871 continue;
872 idx |= er->idx;
873 }
874
875 if (idx) {
876 reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER +
877 SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
878 reg1->config = event->attr.config1 & snbep_cbox_filter_mask(idx);
879 reg1->idx = idx;
880 }
881 return 0;
882}
883
/* Cbo ops: common MSR ops plus filter-register configuration/arbitration. */
static struct intel_uncore_ops snbep_uncore_cbox_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_cbox_hw_config,
	.get_constraint		= snbep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
890
891static struct intel_uncore_type snbep_uncore_cbox = {
892 .name = "cbox",
893 .num_counters = 4,
894 .num_boxes = 8,
895 .perf_ctr_bits = 44,
896 .event_ctl = SNBEP_C0_MSR_PMON_CTL0,
897 .perf_ctr = SNBEP_C0_MSR_PMON_CTR0,
898 .event_mask = SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
899 .box_ctl = SNBEP_C0_MSR_PMON_BOX_CTL,
900 .msr_offset = SNBEP_CBO_MSR_OFFSET,
901 .num_shared_regs = 1,
902 .constraints = snbep_uncore_cbox_constraints,
903 .ops = &snbep_uncore_cbox_ops,
904 .format_group = &snbep_uncore_cbox_format_group,
905};
906
907static u64 snbep_pcu_alter_er(struct perf_event *event, int new_idx, bool modify)
908{
909 struct hw_perf_event *hwc = &event->hw;
910 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
911 u64 config = reg1->config;
912
913 if (new_idx > reg1->idx)
914 config <<= 8 * (new_idx - reg1->idx);
915 else
916 config >>= 8 * (reg1->idx - new_idx);
917
918 if (modify) {
919 hwc->config += new_idx - reg1->idx;
920 reg1->config = config;
921 reg1->idx = new_idx;
922 }
923 return config;
924}
925
/*
 * Claim one byte slot of the shared PCU filter register for this event.
 * A slot can be taken if its per-slot refcount is zero or it already holds
 * the same filter value.  On conflict, the other three slots are tried in
 * turn (relocating the filter value each time); if none fits, the event is
 * constrained to nothing.
 */
static struct event_constraint *
snbep_pcu_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct intel_uncore_extra_reg *er = &box->shared_regs[0];
	unsigned long flags;
	int idx = reg1->idx;
	u64 mask, config1 = reg1->config;
	bool ok = false;

	/* No filter needed, or (on a real box) already allocated earlier. */
	if (reg1->idx == EXTRA_REG_NONE ||
	    (!uncore_box_is_fake(box) && reg1->alloc))
		return NULL;
again:
	mask = 0xffULL << (idx * 8);
	raw_spin_lock_irqsave(&er->lock, flags);
	/* Slot free (refcount 0) or already programmed with the same value. */
	if (!__BITS_VALUE(atomic_read(&er->ref), idx, 8) ||
	    !((config1 ^ er->config) & mask)) {
		atomic_add(1 << (idx * 8), &er->ref);
		er->config &= ~mask;
		er->config |= config1 & mask;
		ok = true;
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);

	if (!ok) {
		/* Conflict: rotate to the next of the four byte slots. */
		idx = (idx + 1) % 4;
		if (idx != reg1->idx) {
			config1 = snbep_pcu_alter_er(event, idx, false);
			goto again;
		}
		return &uncore_constraint_empty;
	}

	if (!uncore_box_is_fake(box)) {
		/* Commit a slot move (if any) and remember the allocation. */
		if (idx != reg1->idx)
			snbep_pcu_alter_er(event, idx, true);
		reg1->alloc = 1;
	}
	return NULL;
}
967
/* Release the PCU filter byte slot taken in snbep_pcu_get_constraint(). */
static void snbep_pcu_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct intel_uncore_extra_reg *er = &box->shared_regs[0];

	/* Fake boxes never took a reference; neither did events without one. */
	if (uncore_box_is_fake(box) || !reg1->alloc)
		return;

	/* Drop this slot's refcount (one byte of the packed counter). */
	atomic_sub(1 << (reg1->idx * 8), &er->ref);
	reg1->alloc = 0;
}
979
980static int snbep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event)
981{
982 struct hw_perf_event *hwc = &event->hw;
983 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
984 int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
985
986 if (ev_sel >= 0xb && ev_sel <= 0xe) {
987 reg1->reg = SNBEP_PCU_MSR_PMON_BOX_FILTER;
988 reg1->idx = ev_sel - 0xb;
989 reg1->config = event->attr.config1 & (0xff << (reg1->idx * 8));
990 }
991 return 0;
992}
993
/* PCU MSR ops: common accessors plus band-filter config/constraint hooks. */
static struct intel_uncore_ops snbep_uncore_pcu_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};

/* SNB-EP power control unit PMU: single box with one shared filter register. */
static struct intel_uncore_type snbep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= SNBEP_PCU_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_pcu_ops,
	.format_group		= &snbep_uncore_pcu_format_group,
};

/* All SNB-EP MSR-based uncore PMUs; NULL-terminated. */
static struct intel_uncore_type *snbep_msr_uncores[] = {
	&snbep_uncore_ubox,
	&snbep_uncore_cbox,
	&snbep_uncore_pcu,
	NULL,
};
1021
1022void snbep_uncore_cpu_init(void)
1023{
1024 if (snbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
1025 snbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
1026 uncore_msr_uncores = snbep_msr_uncores;
1027}
1028
/* Slot indices into uncore_extra_pci_dev[pkg].dev[] for auxiliary PCI devices. */
enum {
	SNBEP_PCI_QPI_PORT0_FILTER,
	SNBEP_PCI_QPI_PORT1_FILTER,
	BDX_PCI_QPI_PORT2_FILTER,
	HSWEP_PCI_PCU_3,
};
1035
1036static int snbep_qpi_hw_config(struct intel_uncore_box *box, struct perf_event *event)
1037{
1038 struct hw_perf_event *hwc = &event->hw;
1039 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
1040 struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
1041
1042 if ((hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK) == 0x38) {
1043 reg1->idx = 0;
1044 reg1->reg = SNBEP_Q_Py_PCI_PMON_PKT_MATCH0;
1045 reg1->config = event->attr.config1;
1046 reg2->reg = SNBEP_Q_Py_PCI_PMON_PKT_MASK0;
1047 reg2->config = event->attr.config2;
1048 }
1049 return 0;
1050}
1051
/*
 * Enable a QPI event.  The packet match/mask registers live on a separate
 * PCI "filter" device (one per QPI port); if one was configured, program
 * it first — each 64-bit value is written as two 32-bit config writes —
 * then arm the counter on the main device.
 */
static void snbep_qpi_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		/* Pick the filter device matching this box's QPI port. */
		int idx = box->pmu->pmu_idx + SNBEP_PCI_QPI_PORT0_FILTER;
		int pkg = box->pkgid;
		struct pci_dev *filter_pdev = uncore_extra_pci_dev[pkg].dev[idx];

		if (filter_pdev) {
			pci_write_config_dword(filter_pdev, reg1->reg,
						(u32)reg1->config);
			pci_write_config_dword(filter_pdev, reg1->reg + 4,
						(u32)(reg1->config >> 32));
			pci_write_config_dword(filter_pdev, reg2->reg,
						(u32)reg2->config);
			pci_write_config_dword(filter_pdev, reg2->reg + 4,
						(u32)(reg2->config >> 32));
		}
	}

	pci_write_config_dword(pdev, hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
1078
/* QPI PCI ops: common PCI accessors plus match/mask filter handling. */
static struct intel_uncore_ops snbep_uncore_qpi_ops = {
	SNBEP_UNCORE_PCI_OPS_COMMON_INIT(),
	.enable_event		= snbep_qpi_enable_event,
	.hw_config		= snbep_qpi_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};

/* Register layout shared by all SNB-EP PCI-based uncore boxes. */
#define SNBEP_UNCORE_PCI_COMMON_INIT()				\
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,			\
	.event_ctl	= SNBEP_PCI_PMON_CTL0,			\
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,		\
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,		\
	.ops		= &snbep_uncore_pci_ops,		\
	.format_group	= &snbep_uncore_format_group

1094
/* SNB-EP home agent. */
static struct intel_uncore_type snbep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/* SNB-EP integrated memory controller: 4 channels, plus a fixed DCLK counter. */
static struct intel_uncore_type snbep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 4,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= snbep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/* SNB-EP QPI link layer: 2 ports, custom ops for packet match/mask filters. */
static struct intel_uncore_type snbep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_qpi_ops,
	.event_descs		= snbep_uncore_qpi_events,
	.format_group		= &snbep_uncore_qpi_format_group,
};


/* SNB-EP ring-to-PCIe interface. */
static struct intel_uncore_type snbep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};

/* SNB-EP ring-to-QPI interface: one box per QPI link. */
static struct intel_uncore_type snbep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 2,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
1148
/* Indices into snbep_pci_uncores[], referenced by the PCI id table below. */
enum {
	SNBEP_PCI_UNCORE_HA,
	SNBEP_PCI_UNCORE_IMC,
	SNBEP_PCI_UNCORE_QPI,
	SNBEP_PCI_UNCORE_R2PCIE,
	SNBEP_PCI_UNCORE_R3QPI,
};

/* All SNB-EP PCI-based uncore PMUs; NULL-terminated. */
static struct intel_uncore_type *snbep_pci_uncores[] = {
	[SNBEP_PCI_UNCORE_HA]		= &snbep_uncore_ha,
	[SNBEP_PCI_UNCORE_IMC]		= &snbep_uncore_imc,
	[SNBEP_PCI_UNCORE_QPI]		= &snbep_uncore_qpi,
	[SNBEP_PCI_UNCORE_R2PCIE]	= &snbep_uncore_r2pcie,
	[SNBEP_PCI_UNCORE_R3QPI]	= &snbep_uncore_r3qpi,
	NULL,
};
1165
/*
 * PCI ids of the SNB-EP uncore devices.  driver_data packs the uncore type
 * index and the box index; the 0x3c86/0x3c96 entries are the QPI port
 * filter devices, stashed as "extra" devices rather than counted boxes.
 */
static const struct pci_device_id snbep_uncore_pci_ids[] = {
	{ /* Home Agent */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_HA),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_HA, 0),
	},
	{ /* MC Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC2),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_IMC3),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_IMC, 3),
	},
	{ /* QPI Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_QPI1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_QPI, 1),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R2PCIE),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI0),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, PCI_DEVICE_ID_INTEL_UNC_R3QPI1),
		.driver_data = UNCORE_PCI_DEV_DATA(SNBEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* QPI Port 0 filter device */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter device */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x3c96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* end: all zeroes */ }
};

/* Id table only; device binding is done by the common uncore PCI code. */
static struct pci_driver snbep_uncore_pci_driver = {
	.name		= "snbep_uncore",
	.id_table	= snbep_uncore_pci_ids,
};
1224
1225
1226
1227
1228static int snbep_pci2phy_map_init(int devid, int nodeid_loc, int idmap_loc, bool reverse)
1229{
1230 struct pci_dev *ubox_dev = NULL;
1231 int i, bus, nodeid, segment;
1232 struct pci2phy_map *map;
1233 int err = 0;
1234 u32 config = 0;
1235
1236 while (1) {
1237
1238 ubox_dev = pci_get_device(PCI_VENDOR_ID_INTEL, devid, ubox_dev);
1239 if (!ubox_dev)
1240 break;
1241 bus = ubox_dev->bus->number;
1242
1243 err = pci_read_config_dword(ubox_dev, nodeid_loc, &config);
1244 if (err)
1245 break;
1246 nodeid = config;
1247
1248 err = pci_read_config_dword(ubox_dev, idmap_loc, &config);
1249 if (err)
1250 break;
1251
1252 segment = pci_domain_nr(ubox_dev->bus);
1253 raw_spin_lock(&pci2phy_map_lock);
1254 map = __find_pci2phy_map(segment);
1255 if (!map) {
1256 raw_spin_unlock(&pci2phy_map_lock);
1257 err = -ENOMEM;
1258 break;
1259 }
1260
1261
1262
1263
1264
1265 for (i = 0; i < 8; i++) {
1266 if (nodeid == ((config >> (3 * i)) & 0x7)) {
1267 map->pbus_to_physid[bus] = i;
1268 break;
1269 }
1270 }
1271 raw_spin_unlock(&pci2phy_map_lock);
1272 }
1273
1274 if (!err) {
1275
1276
1277
1278
1279 raw_spin_lock(&pci2phy_map_lock);
1280 list_for_each_entry(map, &pci2phy_map_head, list) {
1281 i = -1;
1282 if (reverse) {
1283 for (bus = 255; bus >= 0; bus--) {
1284 if (map->pbus_to_physid[bus] >= 0)
1285 i = map->pbus_to_physid[bus];
1286 else
1287 map->pbus_to_physid[bus] = i;
1288 }
1289 } else {
1290 for (bus = 0; bus <= 255; bus++) {
1291 if (map->pbus_to_physid[bus] >= 0)
1292 i = map->pbus_to_physid[bus];
1293 else
1294 map->pbus_to_physid[bus] = i;
1295 }
1296 }
1297 }
1298 raw_spin_unlock(&pci2phy_map_lock);
1299 }
1300
1301 pci_dev_put(ubox_dev);
1302
1303 return err ? pcibios_err_to_errno(err) : 0;
1304}
1305
1306int snbep_uncore_pci_init(void)
1307{
1308 int ret = snbep_pci2phy_map_init(0x3ce0, SNBEP_CPUNODEID, SNBEP_GIDNIDMAP, true);
1309 if (ret)
1310 return ret;
1311 uncore_pci_uncores = snbep_pci_uncores;
1312 uncore_pci_driver = &snbep_uncore_pci_driver;
1313 return 0;
1314}
1315
1316
1317
/* Reset an IVB-EP MSR-based box (counters + control, freeze enabled). */
static void ivbep_uncore_msr_init_box(struct intel_uncore_box *box)
{
	unsigned msr = uncore_msr_box_ctl(box);
	if (msr)
		wrmsrl(msr, IVBEP_PMON_BOX_CTL_INT);
}

/* Reset an IVB-EP PCI-based box via its box control register. */
static void ivbep_uncore_pci_init_box(struct intel_uncore_box *box)
{
	struct pci_dev *pdev = box->pci_dev;

	pci_write_config_dword(pdev, SNBEP_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT);
}
1331
/* Common MSR ops for IVB-EP boxes: IVB-EP init, otherwise SNB-EP accessors. */
#define IVBEP_UNCORE_MSR_OPS_COMMON_INIT()			\
	.init_box	= ivbep_uncore_msr_init_box,		\
	.disable_box	= snbep_uncore_msr_disable_box,		\
	.enable_box	= snbep_uncore_msr_enable_box,		\
	.disable_event	= snbep_uncore_msr_disable_event,	\
	.enable_event	= snbep_uncore_msr_enable_event,	\
	.read_counter	= uncore_msr_read_counter

static struct intel_uncore_ops ivbep_uncore_msr_ops = {
	IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
};

/* Common PCI ops for IVB-EP boxes; only init differs from SNB-EP. */
static struct intel_uncore_ops ivbep_uncore_pci_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_uncore_pci_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
};

/* Register layout shared by all IVB-EP PCI-based uncore boxes. */
#define IVBEP_UNCORE_PCI_COMMON_INIT()				\
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,			\
	.event_ctl	= SNBEP_PCI_PMON_CTL0,			\
	.event_mask	= IVBEP_PMON_RAW_EVENT_MASK,		\
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,		\
	.ops		= &ivbep_uncore_pci_ops,		\
	.format_group	= &ivbep_uncore_format_group

1360
/* Generic IVB-EP event format (8-bit threshold). */
static struct attribute *ivbep_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

/* Ubox format: narrower (5-bit) threshold field. */
static struct attribute *ivbep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	NULL,
};

/* C-box format: adds the tid enable bit and the box filter fields. */
static struct attribute *ivbep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid.attr,
	&format_attr_filter_link.attr,
	&format_attr_filter_state2.attr,
	&format_attr_filter_nid2.attr,
	&format_attr_filter_opc2.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_c6.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};

/* PCU format: occupancy select/invert/edge plus the four band filters. */
static struct attribute *ivbep_uncore_pcu_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_occ_sel.attr,
	&format_attr_edge.attr,
	&format_attr_thresh5.attr,
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge.attr,
	&format_attr_filter_band0.attr,
	&format_attr_filter_band1.attr,
	&format_attr_filter_band2.attr,
	&format_attr_filter_band3.attr,
	NULL,
};

/* QPI format: extended event select plus packet match/mask fields. */
static struct attribute *ivbep_uncore_qpi_formats_attr[] = {
	&format_attr_event_ext.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_thresh8.attr,
	&format_attr_match_rds.attr,
	&format_attr_match_rnid30.attr,
	&format_attr_match_rnid4.attr,
	&format_attr_match_dnid.attr,
	&format_attr_match_mc.attr,
	&format_attr_match_opc.attr,
	&format_attr_match_vnw.attr,
	&format_attr_match0.attr,
	&format_attr_match1.attr,
	&format_attr_mask_rds.attr,
	&format_attr_mask_rnid30.attr,
	&format_attr_mask_rnid4.attr,
	&format_attr_mask_dnid.attr,
	&format_attr_mask_mc.attr,
	&format_attr_mask_opc.attr,
	&format_attr_mask_vnw.attr,
	&format_attr_mask0.attr,
	&format_attr_mask1.attr,
	NULL,
};
1435
/* sysfs "format" groups exposing the attribute lists above. */
static const struct attribute_group ivbep_uncore_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_formats_attr,
};

static const struct attribute_group ivbep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_ubox_formats_attr,
};

static const struct attribute_group ivbep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_cbox_formats_attr,
};

static const struct attribute_group ivbep_uncore_pcu_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_pcu_formats_attr,
};

static const struct attribute_group ivbep_uncore_qpi_format_group = {
	.name = "format",
	.attrs = ivbep_uncore_qpi_formats_attr,
};
1460
/* IVB-EP Ubox (system config controller) PMU, with a fixed UCLK counter. */
static struct intel_uncore_type ivbep_uncore_ubox = {
	.name		= "ubox",
	.num_counters   = 2,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.fixed_ctr_bits	= 48,
	.perf_ctr	= SNBEP_U_MSR_PMON_CTR0,
	.event_ctl	= SNBEP_U_MSR_PMON_CTL0,
	.event_mask	= IVBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl	= SNBEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops		= &ivbep_uncore_msr_ops,
	.format_group	= &ivbep_uncore_ubox_format_group,
};
1475
/*
 * IVB-EP C-box events that require the box filter register.  Each entry maps
 * an event encoding (matched under the given mask) to the filter-field
 * selector bits consumed by ivbep_cbox_filter_mask().
 */
static struct extra_reg ivbep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1031, 0x10ff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5134, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4334, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4534, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4934, 0xffff, 0xc),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
	EVENT_EXTRA_END
};
1516
1517static u64 ivbep_cbox_filter_mask(int fields)
1518{
1519 u64 mask = 0;
1520
1521 if (fields & 0x1)
1522 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_TID;
1523 if (fields & 0x2)
1524 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_LINK;
1525 if (fields & 0x4)
1526 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_STATE;
1527 if (fields & 0x8)
1528 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NID;
1529 if (fields & 0x10) {
1530 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_OPC;
1531 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_NC;
1532 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_C6;
1533 mask |= IVBEP_CB0_MSR_PMON_BOX_FILTER_ISOC;
1534 }
1535
1536 return mask;
1537}
1538
/* C-box constraint lookup: common helper parameterized by the IVB-EP filter mask. */
static struct event_constraint *
ivbep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, ivbep_cbox_filter_mask);
}
1544
1545static int ivbep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
1546{
1547 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
1548 struct extra_reg *er;
1549 int idx = 0;
1550
1551 for (er = ivbep_uncore_cbox_extra_regs; er->msr; er++) {
1552 if (er->event != (event->hw.config & er->config_mask))
1553 continue;
1554 idx |= er->idx;
1555 }
1556
1557 if (idx) {
1558 reg1->reg = SNBEP_C0_MSR_PMON_BOX_FILTER +
1559 SNBEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
1560 reg1->config = event->attr.config1 & ivbep_cbox_filter_mask(idx);
1561 reg1->idx = idx;
1562 }
1563 return 0;
1564}
1565
/*
 * Enable an IVB-EP C-box event.  The 64-bit filter value is split across
 * two MSRs: the low half goes to the FILTER register (reg1->reg), the high
 * half to a second filter register located 6 MSRs above it.  The counter
 * control is armed afterwards.
 */
static void ivbep_cbox_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		u64 filter = uncore_shared_reg_config(box, 0);
		wrmsrl(reg1->reg, filter & 0xffffffff);
		wrmsrl(reg1->reg + 6, filter >> 32);
	}

	wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
1579
/* IVB-EP C-box ops: split-MSR filter write plus IVB-EP config/constraint hooks. */
static struct intel_uncore_ops ivbep_uncore_cbox_ops = {
	.init_box		= ivbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= ivbep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= ivbep_cbox_hw_config,
	.get_constraint		= ivbep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};

/* IVB-EP C-box PMU: up to 15 boxes, one shared filter register pair each. */
static struct intel_uncore_type ivbep_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 15,
	.perf_ctr_bits		= 44,
	.event_ctl		= SNBEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= SNBEP_C0_MSR_PMON_CTR0,
	.event_mask		= IVBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= SNBEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,
	.constraints		= snbep_uncore_cbox_constraints,
	.ops			= &ivbep_uncore_cbox_ops,
	.format_group		= &ivbep_uncore_cbox_format_group,
};
1607
/* IVB-EP PCU ops: IVB-EP MSR accessors, SNB-EP band-filter hooks. */
static struct intel_uncore_ops ivbep_uncore_pcu_ops = {
	IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= snbep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};

/* IVB-EP power control unit PMU: single box with one shared filter register. */
static struct intel_uncore_type ivbep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= SNBEP_PCU_MSR_PMON_CTL0,
	.event_mask		= IVBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_pcu_ops,
	.format_group		= &ivbep_uncore_pcu_format_group,
};

/* All IVB-EP MSR-based uncore PMUs; NULL-terminated. */
static struct intel_uncore_type *ivbep_msr_uncores[] = {
	&ivbep_uncore_ubox,
	&ivbep_uncore_cbox,
	&ivbep_uncore_pcu,
	NULL,
};
1635
1636void ivbep_uncore_cpu_init(void)
1637{
1638 if (ivbep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
1639 ivbep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
1640 uncore_msr_uncores = ivbep_msr_uncores;
1641}
1642
/* IVB-EP home agent: two boxes. */
static struct intel_uncore_type ivbep_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};

/* IVB-EP memory controller: 8 channels, plus a fixed DCLK counter. */
static struct intel_uncore_type ivbep_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= snbep_uncore_imc_events,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1662
1663
/*
 * The IVB-EP IRP box has non-contiguous per-counter register offsets in PCI
 * config space, so it needs its own accessors indexed by hwc->idx.
 */
static unsigned ivbep_uncore_irp_ctls[] = {0xd8, 0xdc, 0xe0, 0xe4};
static unsigned ivbep_uncore_irp_ctrs[] = {0xa0, 0xb0, 0xb8, 0xc0};

/* Arm the IRP counter control for this event's counter index. */
static void ivbep_uncore_irp_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;

	pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx],
			       hwc->config | SNBEP_PMON_CTL_EN);
}

/* Disarm the IRP counter (write config without the enable bit). */
static void ivbep_uncore_irp_disable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;

	pci_write_config_dword(pdev, ivbep_uncore_irp_ctls[hwc->idx], hwc->config);
}

/* Read the 48-bit IRP counter as two little-endian 32-bit config reads. */
static u64 ivbep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	u64 count = 0;

	pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
	pci_read_config_dword(pdev, ivbep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);

	return count;
}
1695
/* IRP ops: common box control, but index-based counter accessors (see above). */
static struct intel_uncore_ops ivbep_uncore_irp_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= ivbep_uncore_irp_disable_event,
	.enable_event	= ivbep_uncore_irp_enable_event,
	.read_counter	= ivbep_uncore_irp_read_counter,
};

/* IVB-EP IRP (IIO coherency) PMU; no perf_ctr/event_ctl — offsets come from the arrays. */
static struct intel_uncore_type ivbep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= IVBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &ivbep_uncore_irp_ops,
	.format_group		= &ivbep_uncore_format_group,
};
1715
/* IVB-EP QPI ops: IVB-EP init, SNB-EP match/mask filter handling. */
static struct intel_uncore_ops ivbep_uncore_qpi_ops = {
	.init_box	= ivbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_qpi_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
	.hw_config	= snbep_qpi_hw_config,
	.get_constraint	= uncore_get_constraint,
	.put_constraint	= uncore_put_constraint,
};

/* IVB-EP QPI link layer: 3 ports. */
static struct intel_uncore_type ivbep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= IVBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_qpi_ops,
	.format_group		= &ivbep_uncore_qpi_format_group,
};
1741
/* IVB-EP ring-to-PCIe interface. */
static struct intel_uncore_type ivbep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r2pcie_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};

/* IVB-EP ring-to-QPI interface: one box per QPI link. */
static struct intel_uncore_type ivbep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 2,
	.perf_ctr_bits	= 44,
	.constraints	= snbep_uncore_r3qpi_constraints,
	IVBEP_UNCORE_PCI_COMMON_INIT(),
};
1759
/* Indices into ivbep_pci_uncores[], referenced by the PCI id table below. */
enum {
	IVBEP_PCI_UNCORE_HA,
	IVBEP_PCI_UNCORE_IMC,
	IVBEP_PCI_UNCORE_IRP,
	IVBEP_PCI_UNCORE_QPI,
	IVBEP_PCI_UNCORE_R2PCIE,
	IVBEP_PCI_UNCORE_R3QPI,
};

/* All IVB-EP PCI-based uncore PMUs; NULL-terminated. */
static struct intel_uncore_type *ivbep_pci_uncores[] = {
	[IVBEP_PCI_UNCORE_HA]		= &ivbep_uncore_ha,
	[IVBEP_PCI_UNCORE_IMC]		= &ivbep_uncore_imc,
	[IVBEP_PCI_UNCORE_IRP]		= &ivbep_uncore_irp,
	[IVBEP_PCI_UNCORE_QPI]		= &ivbep_uncore_qpi,
	[IVBEP_PCI_UNCORE_R2PCIE]	= &ivbep_uncore_r2pcie,
	[IVBEP_PCI_UNCORE_R3QPI]	= &ivbep_uncore_r3qpi,
	NULL,
};
1778
/*
 * PCI ids of the IVB-EP uncore devices.  driver_data packs the uncore type
 * index and the box index; the 0xe86/0xe96 entries are the QPI port filter
 * devices, stashed as "extra" devices rather than counted boxes.
 */
static const struct pci_device_id ivbep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe30),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe38),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xeb1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef4),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef5),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef0),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xef1),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe39),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe32),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe33),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3a),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe34),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe36),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe37),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe3e),
		.driver_data = UNCORE_PCI_DEV_DATA(IVBEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter device */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter device */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0xe96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* end: all zeroes */ }
};

/* Id table only; device binding is done by the common uncore PCI code. */
static struct pci_driver ivbep_uncore_pci_driver = {
	.name		= "ivbep_uncore",
	.id_table	= ivbep_uncore_pci_ids,
};
1869
1870int ivbep_uncore_pci_init(void)
1871{
1872 int ret = snbep_pci2phy_map_init(0x0e1e, SNBEP_CPUNODEID, SNBEP_GIDNIDMAP, true);
1873 if (ret)
1874 return ret;
1875 uncore_pci_uncores = ivbep_pci_uncores;
1876 uncore_pci_driver = &ivbep_uncore_pci_driver;
1877 return 0;
1878}
1879
1880
1881
/* KNL Ubox event format (5-bit threshold, tid enable). */
static struct attribute *knl_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	NULL,
};

static const struct attribute_group knl_uncore_ubox_format_group = {
	.name = "format",
	.attrs = knl_uncore_ubox_formats_attr,
};

/* KNL Ubox PMU; reuses HSW-EP register locations with common SNB-EP MSR ops. */
static struct intel_uncore_type knl_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= KNL_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops			= &snbep_uncore_msr_ops,
	.format_group		= &knl_uncore_ubox_format_group,
};
1911
/* sysfs "format" attributes for the KNL CHA, including filter fields. */
static struct attribute *knl_uncore_cha_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_qor.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	/* Filter-register fields (config1). */
	&format_attr_filter_tid4.attr,
	&format_attr_filter_link3.attr,
	&format_attr_filter_state4.attr,
	&format_attr_filter_local.attr,
	&format_attr_filter_all_op.attr,
	&format_attr_filter_nnm.attr,
	&format_attr_filter_opc3.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};
1931
/* Exposes the CHA format attributes under .../format in sysfs. */
static const struct attribute_group knl_uncore_cha_format_group = {
	.name = "format",
	.attrs = knl_uncore_cha_formats_attr,
};
1936
/* CHA events restricted to specific counters (event id -> counter mask). */
static struct event_constraint knl_uncore_cha_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	EVENT_CONSTRAINT_END
};
1943
/*
 * Event selections that require the CHA filter register. The third
 * argument is a field bitmask consumed by knl_cha_filter_mask()
 * (0x1 = TID, 0x2 = STATE, 0x4 = OP).
 */
static struct extra_reg knl_uncore_cha_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x3d, 0xff, 0x2),
	SNBEP_CBO_EVENT_EXTRA_REG(0x35, 0xff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x36, 0xff, 0x4),
	EVENT_EXTRA_END
};
1952
1953static u64 knl_cha_filter_mask(int fields)
1954{
1955 u64 mask = 0;
1956
1957 if (fields & 0x1)
1958 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_TID;
1959 if (fields & 0x2)
1960 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_STATE;
1961 if (fields & 0x4)
1962 mask |= KNL_CHA_MSR_PMON_BOX_FILTER_OP;
1963 return mask;
1964}
1965
/* Shared C-Box constraint logic, parameterized with the KNL filter mask. */
static struct event_constraint *
knl_cha_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, knl_cha_filter_mask);
}
1971
/*
 * Set up the CHA filter register for @event: collect the filter fields
 * required by matching extra-reg entries, then program config1 (masked
 * to the valid fields) into this box's filter MSR. Always returns 0.
 */
static int knl_cha_hw_config(struct intel_uncore_box *box,
			     struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct extra_reg *er;
	int idx = 0;

	for (er = knl_uncore_cha_extra_regs; er->msr; er++) {
		if (er->event != (event->hw.config & er->config_mask))
			continue;
		idx |= er->idx;
	}

	if (idx) {
		/* Filter MSRs are spaced KNL_CHA_MSR_OFFSET apart per box. */
		reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
			    KNL_CHA_MSR_OFFSET * box->pmu->pmu_idx;
		reg1->config = event->attr.config1 & knl_cha_filter_mask(idx);

		/* These filter bits are ORed in unconditionally. */
		reg1->config |= KNL_CHA_MSR_PMON_BOX_FILTER_REMOTE_NODE;
		reg1->config |= KNL_CHA_MSR_PMON_BOX_FILTER_LOCAL_NODE;
		reg1->config |= KNL_CHA_MSR_PMON_BOX_FILTER_NNC;
		reg1->idx = idx;
	}
	return 0;
}
1997
/* Defined later in the Haswell-EP section; the KNL CHA reuses it. */
static void hswep_cbox_enable_event(struct intel_uncore_box *box,
				    struct perf_event *event);

/* KNL CHA: common SNB-EP MSR callbacks plus CHA filter handling. */
static struct intel_uncore_ops knl_uncore_cha_ops = {
	.init_box		= snbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= hswep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= knl_cha_hw_config,
	.get_constraint		= knl_cha_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
2012
/* KNL CHA (caching/home agent) PMON: up to 38 boxes, one filter reg each. */
static struct intel_uncore_type knl_uncore_cha = {
	.name			= "cha",
	.num_counters		= 4,
	.num_boxes		= 38,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= KNL_CHA_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= KNL_CHA_MSR_OFFSET,
	.num_shared_regs	= 1,	/* the per-box filter register */
	.constraints		= knl_uncore_cha_constraints,
	.ops			= &knl_uncore_cha_ops,
	.format_group		= &knl_uncore_cha_format_group,
};
2028
/* sysfs "format" attributes for the KNL PCU, including occupancy fields. */
static struct attribute *knl_uncore_pcu_formats_attr[] = {
	&format_attr_event2.attr,
	&format_attr_use_occ_ctr.attr,
	&format_attr_occ_sel.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh6.attr,	/* PCU threshold field is 6 bits wide */
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge_det.attr,
	NULL,
};
2041
/* Exposes the PCU format attributes under .../format in sysfs. */
static const struct attribute_group knl_uncore_pcu_format_group = {
	.name = "format",
	.attrs = knl_uncore_pcu_formats_attr,
};
2046
/* KNL PCU (power control unit) PMON, reusing the HSW-EP register layout. */
static struct intel_uncore_type knl_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= HSWEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_PCU_MSR_PMON_CTL0,
	.event_mask		= KNL_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_PCU_MSR_PMON_BOX_CTL,
	.ops			= &snbep_uncore_msr_ops,
	.format_group		= &knl_uncore_pcu_format_group,
};
2059
/* NULL-terminated list of all MSR-based KNL uncore units. */
static struct intel_uncore_type *knl_msr_uncores[] = {
	&knl_uncore_ubox,
	&knl_uncore_cha,
	&knl_uncore_pcu,
	NULL,
};
2066
/* Publish the KNL MSR uncore units to the generic uncore core. */
void knl_uncore_cpu_init(void)
{
	uncore_msr_uncores = knl_msr_uncores;
}
2071
/*
 * Enable a KNL MC/EDC box by clearing its box control register
 * (writing 0 drops the freeze/reset bits set at init time).
 */
static void knl_uncore_imc_enable_box(struct intel_uncore_box *box)
{
	struct pci_dev *pdev = box->pci_dev;
	int box_ctl = uncore_pci_box_ctl(box);

	pci_write_config_dword(pdev, box_ctl, 0);
}
2079
2080static void knl_uncore_imc_enable_event(struct intel_uncore_box *box,
2081 struct perf_event *event)
2082{
2083 struct pci_dev *pdev = box->pci_dev;
2084 struct hw_perf_event *hwc = &event->hw;
2085
2086 if ((event->attr.config & SNBEP_PMON_CTL_EV_SEL_MASK)
2087 == UNCORE_FIXED_EVENT)
2088 pci_write_config_dword(pdev, hwc->config_base,
2089 hwc->config | KNL_PMON_FIXED_CTL_EN);
2090 else
2091 pci_write_config_dword(pdev, hwc->config_base,
2092 hwc->config | SNBEP_PMON_CTL_EN);
2093}
2094
/* Shared callbacks for all KNL PCI-based MC/EDC PMON boxes. */
static struct intel_uncore_ops knl_uncore_imc_ops = {
	.init_box	= snbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= knl_uncore_imc_enable_box,
	.read_counter	= snbep_uncore_pci_read_counter,
	.enable_event	= knl_uncore_imc_enable_event,
	.disable_event	= snbep_uncore_pci_disable_event,
};
2103
/* KNL memory controller UClk PMON (one box per MC). */
static struct intel_uncore_type knl_uncore_imc_uclk = {
	.name			= "imc_uclk",
	.num_counters		= 4,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= KNL_UCLK_MSR_PMON_CTR0_LOW,
	.event_ctl		= KNL_UCLK_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW,
	.fixed_ctl		= KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL,
	.box_ctl		= KNL_UCLK_MSR_PMON_BOX_CTL,
	.ops			= &knl_uncore_imc_ops,
	.format_group		= &snbep_uncore_format_group,
};
2119
/* KNL memory controller DClk PMON (one box per MC channel). */
static struct intel_uncore_type knl_uncore_imc_dclk = {
	.name			= "imc",
	.num_counters		= 4,
	.num_boxes		= 6,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= KNL_MC0_CH0_MSR_PMON_CTR0_LOW,
	.event_ctl		= KNL_MC0_CH0_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= KNL_MC0_CH0_MSR_PMON_FIXED_LOW,
	.fixed_ctl		= KNL_MC0_CH0_MSR_PMON_FIXED_CTL,
	.box_ctl		= KNL_MC0_CH0_MSR_PMON_BOX_CTL,
	.ops			= &knl_uncore_imc_ops,
	.format_group		= &snbep_uncore_format_group,
};
2135
/* KNL embedded DRAM controller UClk PMON (eight EDC boxes). */
static struct intel_uncore_type knl_uncore_edc_uclk = {
	.name			= "edc_uclk",
	.num_counters		= 4,
	.num_boxes		= 8,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= KNL_UCLK_MSR_PMON_CTR0_LOW,
	.event_ctl		= KNL_UCLK_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= KNL_UCLK_MSR_PMON_UCLK_FIXED_LOW,
	.fixed_ctl		= KNL_UCLK_MSR_PMON_UCLK_FIXED_CTL,
	.box_ctl		= KNL_UCLK_MSR_PMON_BOX_CTL,
	.ops			= &knl_uncore_imc_ops,
	.format_group		= &snbep_uncore_format_group,
};
2151
/* KNL embedded DRAM controller EClk PMON (eight EDC boxes). */
static struct intel_uncore_type knl_uncore_edc_eclk = {
	.name			= "edc_eclk",
	.num_counters		= 4,
	.num_boxes		= 8,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= KNL_EDC0_ECLK_MSR_PMON_CTR0_LOW,
	.event_ctl		= KNL_EDC0_ECLK_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_LOW,
	.fixed_ctl		= KNL_EDC0_ECLK_MSR_PMON_ECLK_FIXED_CTL,
	.box_ctl		= KNL_EDC0_ECLK_MSR_PMON_BOX_CTL,
	.ops			= &knl_uncore_imc_ops,
	.format_group		= &snbep_uncore_format_group,
};
2167
/* M2PCIe event 0x23 may only use counters 0-1. */
static struct event_constraint knl_uncore_m2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	EVENT_CONSTRAINT_END
};
2172
/* KNL M2PCIe PMON: standard SNB-EP PCI box layout. */
static struct intel_uncore_type knl_uncore_m2pcie = {
	.name		= "m2pcie",
	.num_counters	= 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= knl_uncore_m2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2181
/* sysfs "format" attributes for the KNL IRP event config. */
static struct attribute *knl_uncore_irp_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_qor.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};
2191
/* Exposes the IRP format attributes under .../format in sysfs. */
static const struct attribute_group knl_uncore_irp_format_group = {
	.name = "format",
	.attrs = knl_uncore_irp_formats_attr,
};
2196
/* KNL IRP (IIO ring port) PMON, accessed via PCI config space. */
static struct intel_uncore_type knl_uncore_irp = {
	.name			= "irp",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= KNL_IRP_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= KNL_IRP_PCI_PMON_BOX_CTL,
	.ops			= &snbep_uncore_pci_ops,
	.format_group		= &knl_uncore_irp_format_group,
};
2209
/* Indices into knl_pci_uncores[], referenced by the PCI id table below. */
enum {
	KNL_PCI_UNCORE_MC_UCLK,
	KNL_PCI_UNCORE_MC_DCLK,
	KNL_PCI_UNCORE_EDC_UCLK,
	KNL_PCI_UNCORE_EDC_ECLK,
	KNL_PCI_UNCORE_M2PCIE,
	KNL_PCI_UNCORE_IRP,
};
2218
/* NULL-terminated list of PCI-based KNL uncore units, indexed by the enum. */
static struct intel_uncore_type *knl_pci_uncores[] = {
	[KNL_PCI_UNCORE_MC_UCLK]	= &knl_uncore_imc_uclk,
	[KNL_PCI_UNCORE_MC_DCLK]	= &knl_uncore_imc_dclk,
	[KNL_PCI_UNCORE_EDC_UCLK]	= &knl_uncore_edc_uclk,
	[KNL_PCI_UNCORE_EDC_ECLK]	= &knl_uncore_edc_eclk,
	[KNL_PCI_UNCORE_M2PCIE]		= &knl_uncore_m2pcie,
	[KNL_PCI_UNCORE_IRP]		= &knl_uncore_irp,
	NULL,
};
2228
2229
2230
2231
2232
2233
2234
2235
2236
2237
2238
2239
2240
2241
2242
2243
2244
2245
2246
/*
 * KNL uncore PMON units sit at fixed PCI device/function addresses, so
 * each entry also pins the expected devfn:
 * UNCORE_PCI_DEV_FULL_DATA(device, function, uncore type, box index).
 */
static const struct pci_device_id knl_uncore_pci_ids[] = {
	{ /* MC0 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7841),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(10, 0, KNL_PCI_UNCORE_MC_UCLK, 0),
	},
	{ /* MC1 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7841),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(11, 0, KNL_PCI_UNCORE_MC_UCLK, 1),
	},
	{ /* MC0 DClk CH 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(8, 2, KNL_PCI_UNCORE_MC_DCLK, 0),
	},
	{ /* MC0 DClk CH 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(8, 3, KNL_PCI_UNCORE_MC_DCLK, 1),
	},
	{ /* MC0 DClk CH 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(8, 4, KNL_PCI_UNCORE_MC_DCLK, 2),
	},
	{ /* MC1 DClk CH 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(9, 2, KNL_PCI_UNCORE_MC_DCLK, 3),
	},
	{ /* MC1 DClk CH 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(9, 3, KNL_PCI_UNCORE_MC_DCLK, 4),
	},
	{ /* MC1 DClk CH 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7843),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(9, 4, KNL_PCI_UNCORE_MC_DCLK, 5),
	},
	{ /* EDC0 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(15, 0, KNL_PCI_UNCORE_EDC_UCLK, 0),
	},
	{ /* EDC1 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(16, 0, KNL_PCI_UNCORE_EDC_UCLK, 1),
	},
	{ /* EDC2 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(17, 0, KNL_PCI_UNCORE_EDC_UCLK, 2),
	},
	{ /* EDC3 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(18, 0, KNL_PCI_UNCORE_EDC_UCLK, 3),
	},
	{ /* EDC4 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(19, 0, KNL_PCI_UNCORE_EDC_UCLK, 4),
	},
	{ /* EDC5 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(20, 0, KNL_PCI_UNCORE_EDC_UCLK, 5),
	},
	{ /* EDC6 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(21, 0, KNL_PCI_UNCORE_EDC_UCLK, 6),
	},
	{ /* EDC7 UClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7833),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(22, 0, KNL_PCI_UNCORE_EDC_UCLK, 7),
	},
	{ /* EDC0 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(24, 2, KNL_PCI_UNCORE_EDC_ECLK, 0),
	},
	{ /* EDC1 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(25, 2, KNL_PCI_UNCORE_EDC_ECLK, 1),
	},
	{ /* EDC2 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(26, 2, KNL_PCI_UNCORE_EDC_ECLK, 2),
	},
	{ /* EDC3 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(27, 2, KNL_PCI_UNCORE_EDC_ECLK, 3),
	},
	{ /* EDC4 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(28, 2, KNL_PCI_UNCORE_EDC_ECLK, 4),
	},
	{ /* EDC5 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(29, 2, KNL_PCI_UNCORE_EDC_ECLK, 5),
	},
	{ /* EDC6 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(30, 2, KNL_PCI_UNCORE_EDC_ECLK, 6),
	},
	{ /* EDC7 EClk */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7835),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(31, 2, KNL_PCI_UNCORE_EDC_ECLK, 7),
	},
	{ /* M2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7817),
		.driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_M2PCIE, 0),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x7814),
		.driver_data = UNCORE_PCI_DEV_DATA(KNL_PCI_UNCORE_IRP, 0),
	},
	{ /* end: all zeroes */ }
};
2354
/* Stub PCI driver: the uncore core only uses the id_table for matching. */
static struct pci_driver knl_uncore_pci_driver = {
	.name		= "knl_uncore",
	.id_table	= knl_uncore_pci_ids,
};
2359
2360int knl_uncore_pci_init(void)
2361{
2362 int ret;
2363
2364
2365 ret = snb_pci2phy_map_init(0x7814);
2366 if (ret)
2367 return ret;
2368 ret = snb_pci2phy_map_init(0x7817);
2369 if (ret)
2370 return ret;
2371 uncore_pci_uncores = knl_pci_uncores;
2372 uncore_pci_driver = &knl_uncore_pci_driver;
2373 return 0;
2374}
2375
2376
2377
2378
/* sysfs "format" attributes for the HSW-EP UBOX, including filter fields. */
static struct attribute *hswep_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh5.attr,
	/* Filter-register fields (config1). */
	&format_attr_filter_tid2.attr,
	&format_attr_filter_cid.attr,
	NULL,
};
2389
/* Exposes the UBOX format attributes under .../format in sysfs. */
static const struct attribute_group hswep_uncore_ubox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_ubox_formats_attr,
};
2394
2395static int hswep_ubox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2396{
2397 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
2398 reg1->reg = HSWEP_U_MSR_PMON_FILTER;
2399 reg1->config = event->attr.config1 & HSWEP_U_MSR_PMON_BOX_FILTER_MASK;
2400 reg1->idx = 0;
2401 return 0;
2402}
2403
/* HSW-EP UBOX: common MSR callbacks plus filter-register management. */
static struct intel_uncore_ops hswep_uncore_ubox_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_ubox_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};
2410
/* HSW-EP UBOX (system configuration unit) PMON. */
static struct intel_uncore_type hswep_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 44,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,	/* the single filter register */
	.ops			= &hswep_uncore_ubox_ops,
	.format_group		= &hswep_uncore_ubox_format_group,
};
2426
/* sysfs "format" attributes for the HSW-EP C-Box, including filter fields. */
static struct attribute *hswep_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_thresh8.attr,
	/* Filter-register fields (config1). */
	&format_attr_filter_tid3.attr,
	&format_attr_filter_link2.attr,
	&format_attr_filter_state3.attr,
	&format_attr_filter_nid2.attr,
	&format_attr_filter_opc2.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_c6.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};
2443
/* Exposes the C-Box format attributes under .../format in sysfs. */
static const struct attribute_group hswep_uncore_cbox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_cbox_formats_attr,
};
2448
/* C-Box events restricted to specific counters (event id -> counter mask). */
static struct event_constraint hswep_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x3b, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
	EVENT_CONSTRAINT_END
};
2459
/*
 * Event selections (event|umask matched under the given mask) that need
 * the C-Box filter register. The third argument is the filter-field
 * bitmask consumed by hswep_cbox_filter_mask(): 0x1 TID, 0x2 LINK,
 * 0x4 STATE, 0x8 NID, 0x10 OPC (plus NC/C6/ISOC).
 */
static struct extra_reg hswep_uncore_cbox_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(SNBEP_CBO_PMON_CTL_TID_EN,
				  SNBEP_CBO_PMON_CTL_TID_EN, 0x1),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4037, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4028, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4032, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4029, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4033, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x402A, 0x40ff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0135, 0xffff, 0x12),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4135, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4435, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4835, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5035, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4335, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a35, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8335, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8135, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4136, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4436, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4836, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4336, 0xffff, 0x18),
	SNBEP_CBO_EVENT_EXTRA_REG(0x4a36, 0xffff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8336, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x2136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x8136, 0xffff, 0x10),
	SNBEP_CBO_EVENT_EXTRA_REG(0x5036, 0xffff, 0x8),
	EVENT_EXTRA_END
};
2501
2502static u64 hswep_cbox_filter_mask(int fields)
2503{
2504 u64 mask = 0;
2505 if (fields & 0x1)
2506 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_TID;
2507 if (fields & 0x2)
2508 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_LINK;
2509 if (fields & 0x4)
2510 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_STATE;
2511 if (fields & 0x8)
2512 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NID;
2513 if (fields & 0x10) {
2514 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_OPC;
2515 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_NC;
2516 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_C6;
2517 mask |= HSWEP_CB0_MSR_PMON_BOX_FILTER_ISOC;
2518 }
2519 return mask;
2520}
2521
/* Shared C-Box constraint logic, parameterized with the HSW-EP filter mask. */
static struct event_constraint *
hswep_cbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, hswep_cbox_filter_mask);
}
2527
2528static int hswep_cbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2529{
2530 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
2531 struct extra_reg *er;
2532 int idx = 0;
2533
2534 for (er = hswep_uncore_cbox_extra_regs; er->msr; er++) {
2535 if (er->event != (event->hw.config & er->config_mask))
2536 continue;
2537 idx |= er->idx;
2538 }
2539
2540 if (idx) {
2541 reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
2542 HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
2543 reg1->config = event->attr.config1 & hswep_cbox_filter_mask(idx);
2544 reg1->idx = idx;
2545 }
2546 return 0;
2547}
2548
/*
 * Enable @event's control register. If the event uses the box filter,
 * first (re)program the filter from the shared-reg cache; the 64-bit
 * filter value spans two consecutive 32-bit MSRs.
 */
static void hswep_cbox_enable_event(struct intel_uncore_box *box,
				    struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		u64 filter = uncore_shared_reg_config(box, 0);
		wrmsrl(reg1->reg, filter & 0xffffffff);
		wrmsrl(reg1->reg + 1, filter >> 32);
	}

	wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
}
2563
/* HSW-EP C-Box: common SNB-EP MSR callbacks plus filter handling. */
static struct intel_uncore_ops hswep_uncore_cbox_ops = {
	.init_box		= snbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= hswep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= hswep_cbox_hw_config,
	.get_constraint		= hswep_cbox_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
2575
/*
 * HSW-EP C-Box PMON. num_boxes is the maximum; it is clamped to the
 * actual core count in hswep_uncore_cpu_init().
 */
static struct intel_uncore_type hswep_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 18,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,	/* the per-box filter register */
	.constraints		= hswep_uncore_cbox_constraints,
	.ops			= &hswep_uncore_cbox_ops,
	.format_group		= &hswep_uncore_cbox_format_group,
};
2591
2592
2593
2594
/*
 * Initialize an SBOX: apply the usual SNB-EP reset/freeze-enable value,
 * but write it one bit at a time, accumulating the bits already set.
 * NOTE(review): this deliberately avoids writing the whole of
 * SNBEP_PMON_BOX_CTL_INT in a single wrmsrl — presumably a hardware
 * erratum workaround; confirm before simplifying this sequence.
 */
static void hswep_uncore_sbox_msr_init_box(struct intel_uncore_box *box)
{
	unsigned msr = uncore_msr_box_ctl(box);

	if (msr) {
		u64 init = SNBEP_PMON_BOX_CTL_INT;
		u64 flags = 0;
		int i;

		for_each_set_bit(i, (unsigned long *)&init, 64) {
			flags |= (1ULL << i);
			wrmsrl(msr, flags);
		}
	}
}
2610
/* SBOX ops: common MSR callbacks with the bit-by-bit init override. */
static struct intel_uncore_ops hswep_uncore_sbox_msr_ops = {
	__SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.init_box		= hswep_uncore_sbox_msr_init_box
};
2615
/* sysfs "format" attributes for the HSW-EP SBOX event config. */
static struct attribute *hswep_uncore_sbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};
2625
/* Exposes the SBOX format attributes under .../format in sysfs. */
static const struct attribute_group hswep_uncore_sbox_format_group = {
	.name = "format",
	.attrs = hswep_uncore_sbox_formats_attr,
};
2630
/*
 * HSW-EP SBOX (ring-to-QPI) PMON. num_boxes may be reduced to 2 in
 * hswep_uncore_cpu_init() on parts with fewer SBOXes.
 */
static struct intel_uncore_type hswep_uncore_sbox = {
	.name			= "sbox",
	.num_counters		= 4,
	.num_boxes		= 4,
	.perf_ctr_bits		= 44,
	.event_ctl		= HSWEP_S0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_S0_MSR_PMON_CTR0,
	.event_mask		= HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_S0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_SBOX_MSR_OFFSET,
	.ops			= &hswep_uncore_sbox_msr_ops,
	.format_group		= &hswep_uncore_sbox_format_group,
};
2644
2645static int hswep_pcu_hw_config(struct intel_uncore_box *box, struct perf_event *event)
2646{
2647 struct hw_perf_event *hwc = &event->hw;
2648 struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
2649 int ev_sel = hwc->config & SNBEP_PMON_CTL_EV_SEL_MASK;
2650
2651 if (ev_sel >= 0xb && ev_sel <= 0xe) {
2652 reg1->reg = HSWEP_PCU_MSR_PMON_BOX_FILTER;
2653 reg1->idx = ev_sel - 0xb;
2654 reg1->config = event->attr.config1 & (0xff << reg1->idx);
2655 }
2656 return 0;
2657}
2658
/* HSW-EP PCU: common MSR callbacks plus band-filter management. */
static struct intel_uncore_ops hswep_uncore_pcu_ops = {
	SNBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};
2665
/* HSW-EP PCU (power control unit) PMON. */
static struct intel_uncore_type hswep_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= HSWEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_PCU_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,	/* the band filter register */
	.ops			= &hswep_uncore_pcu_ops,
	.format_group		= &snbep_uncore_pcu_format_group,
};
2679
/* NULL-terminated list of all MSR-based HSW-EP uncore units. */
static struct intel_uncore_type *hswep_msr_uncores[] = {
	&hswep_uncore_ubox,
	&hswep_uncore_cbox,
	&hswep_uncore_sbox,
	&hswep_uncore_pcu,
	NULL,
};
2687
/*
 * Publish the HSW-EP MSR uncore units, after clamping per-SKU box
 * counts that depend on the running part.
 */
void hswep_uncore_cpu_init(void)
{
	int pkg = boot_cpu_data.logical_proc_id;

	/* Never expose more C-Boxes than the part has cores. */
	if (hswep_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
		hswep_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;

	/*
	 * Probe CAPID4 (offset 0x94) via the PCU PCI device to detect
	 * parts with only two SBOXes; bits 7:6 == 0 presumably encodes
	 * that configuration — TODO confirm against SKU documentation.
	 */
	if (uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3]) {
		u32 capid4;

		pci_read_config_dword(uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3],
				      0x94, &capid4);
		if (((capid4 >> 6) & 0x3) == 0)
			hswep_uncore_sbox.num_boxes = 2;
	}

	uncore_msr_uncores = hswep_msr_uncores;
}
2707
/* HSW-EP HA (home agent) PMON: standard SNB-EP PCI box layout. */
static struct intel_uncore_type hswep_uncore_ha = {
	.name		= "ha",
	.num_counters	= 4,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2715
/*
 * Predefined IMC events. The CAS-count scale (64/1048576) converts
 * 64-byte cache-line transfers into MiB.
 */
static struct uncore_event_desc hswep_uncore_imc_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks,      "event=0x00,umask=0x00"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read,  "event=0x04,umask=0x03"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_read.unit, "MiB"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write, "event=0x04,umask=0x0c"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.scale, "6.103515625e-5"),
	INTEL_UNCORE_EVENT_DESC(cas_count_write.unit, "MiB"),
	{ /* end: all zeroes */ },
};
2726
/* HSW-EP IMC (memory controller channel) PMON with fixed DCLK counter. */
static struct intel_uncore_type hswep_uncore_imc = {
	.name		= "imc",
	.num_counters	= 4,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2738
/* PCI config-space offsets of the four IRP counter registers. */
static unsigned hswep_uncore_irp_ctrs[] = {0xa0, 0xa8, 0xb0, 0xb8};

/*
 * Read a 64-bit IRP counter as two 32-bit PCI config reads (low dword
 * first). NOTE(review): the two halves are not read atomically, so a
 * low-dword carry between the reads could be observed — presumably
 * tolerated here; confirm before relying on exact values.
 */
static u64 hswep_uncore_irp_read_counter(struct intel_uncore_box *box, struct perf_event *event)
{
	struct pci_dev *pdev = box->pci_dev;
	struct hw_perf_event *hwc = &event->hw;
	u64 count = 0;

	pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx], (u32 *)&count);
	pci_read_config_dword(pdev, hswep_uncore_irp_ctrs[hwc->idx] + 4, (u32 *)&count + 1);

	return count;
}
2752
/* HSW-EP IRP: IVB-EP event enable/disable with a custom counter read. */
static struct intel_uncore_ops hswep_uncore_irp_ops = {
	.init_box	= snbep_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= ivbep_uncore_irp_disable_event,
	.enable_event	= ivbep_uncore_irp_enable_event,
	.read_counter	= hswep_uncore_irp_read_counter,
};
2761
/*
 * HSW-EP IRP PMON. No .perf_ctr/.event_ctl: the counters live at the
 * scattered offsets in hswep_uncore_irp_ctrs[], handled by the ops.
 */
static struct intel_uncore_type hswep_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
2772
/* HSW-EP QPI port PMON, reusing the SNB-EP QPI ops and match/mask regs. */
static struct intel_uncore_type hswep_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,	/* QPI match/mask registers */
	.ops			= &snbep_uncore_qpi_ops,
	.format_group		= &snbep_uncore_qpi_format_group,
};
2786
/* R2PCIe events restricted to specific counters (event id -> counter mask). */
static struct event_constraint hswep_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x24, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x27, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2a, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x2b, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x35, 0x3),
	EVENT_CONSTRAINT_END
};
2808
/* HSW-EP R2PCIe (ring-to-PCIe) PMON: standard SNB-EP PCI box layout. */
static struct intel_uncore_type hswep_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters	= 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= hswep_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2817
/* R3QPI events restricted to specific counters (event id -> counter mask). */
static struct event_constraint hswep_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x12, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x31, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x32, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
2854
/* HSW-EP R3QPI (ring-to-QPI) PMON: standard SNB-EP PCI box layout. */
static struct intel_uncore_type hswep_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters	= 3,
	.num_boxes	= 3,
	.perf_ctr_bits	= 44,
	.constraints	= hswep_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
2863
/* Indices into hswep_pci_uncores[], referenced by the PCI id table below. */
enum {
	HSWEP_PCI_UNCORE_HA,
	HSWEP_PCI_UNCORE_IMC,
	HSWEP_PCI_UNCORE_IRP,
	HSWEP_PCI_UNCORE_QPI,
	HSWEP_PCI_UNCORE_R2PCIE,
	HSWEP_PCI_UNCORE_R3QPI,
};
2872
/* NULL-terminated list of PCI-based HSW-EP uncore units, indexed by the enum. */
static struct intel_uncore_type *hswep_pci_uncores[] = {
	[HSWEP_PCI_UNCORE_HA]		= &hswep_uncore_ha,
	[HSWEP_PCI_UNCORE_IMC]		= &hswep_uncore_imc,
	[HSWEP_PCI_UNCORE_IRP]		= &hswep_uncore_irp,
	[HSWEP_PCI_UNCORE_QPI]		= &hswep_uncore_qpi,
	[HSWEP_PCI_UNCORE_R2PCIE]	= &hswep_uncore_r2pcie,
	[HSWEP_PCI_UNCORE_R3QPI]	= &hswep_uncore_r3qpi,
	NULL,
};
2882
/*
 * PCI device IDs of the HSW-EP uncore PMU units.  driver_data encodes
 * (uncore type index, box instance index); UNCORE_EXTRA_PCI_DEV entries
 * are auxiliary devices only read for configuration, not counted.
 */
static const struct pci_device_id hswep_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f30),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f38),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fb5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd0),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd1),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd4),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fd5),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f39),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f32),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f33),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3a),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f34),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f36),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f37),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f3e),
		.driver_data = UNCORE_PCI_DEV_DATA(HSWEP_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2f96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* PCU.3 (for Capability registers) */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2fc0),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   HSWEP_PCI_PCU_3),
	},
	{ /* end: all zeroes */ }
};
2973
/* PCI driver matching the HSW-EP uncore devices; probing is done by the
 * generic uncore core, so only name and id_table are filled in. */
static struct pci_driver hswep_uncore_pci_driver = {
	.name		= "hswep_uncore",
	.id_table	= hswep_uncore_pci_ids,
};
2978
2979int hswep_uncore_pci_init(void)
2980{
2981 int ret = snbep_pci2phy_map_init(0x2f1e, SNBEP_CPUNODEID, SNBEP_GIDNIDMAP, true);
2982 if (ret)
2983 return ret;
2984 uncore_pci_uncores = hswep_pci_uncores;
2985 uncore_pci_driver = &hswep_uncore_pci_driver;
2986 return 0;
2987}
2988
2989
2990
2991
/*
 * BDX UBox MSR uncore PMU: one box with two 48-bit general counters plus
 * a 48-bit fixed (uclk) counter.  Reuses the IVB-EP MSR ops and ubox
 * format group.
 */
static struct intel_uncore_type bdx_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.num_shared_regs	= 1,
	.ops			= &ivbep_uncore_msr_ops,
	.format_group		= &ivbep_uncore_ubox_format_group,
};
3007
/* BDX CBox events restricted to specific counters (event code, counter mask). */
static struct event_constraint bdx_uncore_cbox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x09, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x3e, 0x1),
	EVENT_CONSTRAINT_END
};
3015
/*
 * BDX CBox (LLC coherence engine) MSR uncore PMU.  num_boxes is the
 * maximum (24); bdx_uncore_cpu_init() trims it to the actual core count.
 */
static struct intel_uncore_type bdx_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 4,
	.num_boxes		= 24,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_CBO_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,		/* shared filter register */
	.constraints		= bdx_uncore_cbox_constraints,
	.ops			= &hswep_uncore_cbox_ops,
	.format_group		= &hswep_uncore_cbox_format_group,
};
3031
/*
 * BDX SBox (ring switch) MSR uncore PMU.  bdx_uncore_cpu_init() NULLs
 * this entry out of bdx_msr_uncores[] on parts that have no SBOXes.
 */
static struct intel_uncore_type bdx_uncore_sbox = {
	.name			= "sbox",
	.num_counters		= 4,
	.num_boxes		= 4,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_S0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_S0_MSR_PMON_CTR0,
	.event_mask		= HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_S0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_SBOX_MSR_OFFSET,
	.ops			= &hswep_uncore_sbox_msr_ops,
	.format_group		= &hswep_uncore_sbox_format_group,
};
3045
3046#define BDX_MSR_UNCORE_SBOX 3
3047
/*
 * NULL-terminated list of BDX MSR-based uncore PMU types.  The SBOX must
 * stay at index BDX_MSR_UNCORE_SBOX (3) so bdx_uncore_cpu_init() can clear
 * it on SBOX-less parts.
 */
static struct intel_uncore_type *bdx_msr_uncores[] = {
	&bdx_uncore_ubox,
	&bdx_uncore_cbox,
	&hswep_uncore_pcu,
	&bdx_uncore_sbox,
	NULL,
};
3055
3056
/* BDX PCU constraint: event 0x80 (umask 0xe in the constraint encoding)
 * limited to the counters in mask 0x80. */
static struct event_constraint bdx_uncore_pcu_constraints[] = {
	EVENT_CONSTRAINT(0x80, 0xe, 0x80),
	EVENT_CONSTRAINT_END
};
3061
3062void bdx_uncore_cpu_init(void)
3063{
3064 int pkg = topology_phys_to_logical_pkg(0);
3065
3066 if (bdx_uncore_cbox.num_boxes > boot_cpu_data.x86_max_cores)
3067 bdx_uncore_cbox.num_boxes = boot_cpu_data.x86_max_cores;
3068 uncore_msr_uncores = bdx_msr_uncores;
3069
3070
3071 if (boot_cpu_data.x86_model == 86) {
3072 uncore_msr_uncores[BDX_MSR_UNCORE_SBOX] = NULL;
3073
3074 } else if (uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3]) {
3075 struct pci_dev *pdev;
3076 u32 capid4;
3077
3078 pdev = uncore_extra_pci_dev[pkg].dev[HSWEP_PCI_PCU_3];
3079 pci_read_config_dword(pdev, 0x94, &capid4);
3080 if (((capid4 >> 6) & 0x3) == 0)
3081 bdx_msr_uncores[BDX_MSR_UNCORE_SBOX] = NULL;
3082 }
3083 hswep_uncore_pcu.constraints = bdx_uncore_pcu_constraints;
3084}
3085
/* BDX Home Agent PCI uncore PMU: 2 boxes of four 48-bit counters. */
static struct intel_uncore_type bdx_uncore_ha = {
	.name		= "ha",
	.num_counters   = 4,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
3093
/*
 * BDX IMC (memory controller channel) PCI uncore PMU: 8 channels, four
 * 48-bit general counters plus a 48-bit fixed (DCLK) counter per channel.
 */
static struct intel_uncore_type bdx_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 8,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
3105
/*
 * BDX IRP PCI uncore PMU.  No perf_ctr/event_ctl fields: the IRP register
 * layout is irregular, so hswep_uncore_irp_ops resolves the per-counter
 * offsets itself.
 */
static struct intel_uncore_type bdx_uncore_irp = {
	.name			= "irp",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.ops			= &hswep_uncore_irp_ops,
	.format_group		= &snbep_uncore_format_group,
};
3116
/*
 * BDX QPI link-layer PCI uncore PMU: 3 ports.  Uses the extended event
 * select bit (SNBEP_PMON_CTL_EV_SEL_EXT) and a shared match/mask register
 * managed through snbep_uncore_qpi_ops.
 */
static struct intel_uncore_type bdx_uncore_qpi = {
	.name			= "qpi",
	.num_counters		= 4,
	.num_boxes		= 3,
	.perf_ctr_bits		= 48,
	.perf_ctr		= SNBEP_PCI_PMON_CTR0,
	.event_ctl		= SNBEP_PCI_PMON_CTL0,
	.event_mask		= SNBEP_QPI_PCI_PMON_RAW_EVENT_MASK,
	.box_ctl		= SNBEP_PCI_PMON_BOX_CTL,
	.num_shared_regs	= 1,
	.ops			= &snbep_uncore_qpi_ops,
	.format_group		= &snbep_uncore_qpi_format_group,
};
3130
/* BDX R2PCIe events restricted to specific counters (event code, counter mask). */
static struct event_constraint bdx_uncore_r2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	EVENT_CONSTRAINT_END
};
3143
/* BDX R2PCIe (ring-to-PCIe) PCI uncore PMU: one box, four 48-bit counters. */
static struct intel_uncore_type bdx_uncore_r2pcie = {
	.name		= "r2pcie",
	.num_counters   = 4,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.constraints	= bdx_uncore_r2pcie_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
3152
/* BDX R3QPI events restricted to specific counters (event code, counter mask). */
static struct event_constraint bdx_uncore_r3qpi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x01, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x07, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x08, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x09, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0a, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x0e, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x10, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x11, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x13, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x14, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x15, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x1f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x20, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x21, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x22, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x25, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x26, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x28, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x29, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2c, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2d, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2e, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x2f, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x33, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x34, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x37, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x38, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x39, 0x3),
	EVENT_CONSTRAINT_END
};
3186
/* BDX R3QPI PCI uncore PMU: 3 boxes of 3 counters, 48 bits wide
 * (unlike HSW-EP's 44-bit r3qpi counters). */
static struct intel_uncore_type bdx_uncore_r3qpi = {
	.name		= "r3qpi",
	.num_counters   = 3,
	.num_boxes	= 3,
	.perf_ctr_bits	= 48,
	.constraints	= bdx_uncore_r3qpi_constraints,
	SNBEP_UNCORE_PCI_COMMON_INIT(),
};
3195
/*
 * Indices into bdx_pci_uncores[]; also encoded into the driver_data of
 * each bdx_uncore_pci_ids[] entry via UNCORE_PCI_DEV_DATA().
 */
enum {
	BDX_PCI_UNCORE_HA,
	BDX_PCI_UNCORE_IMC,
	BDX_PCI_UNCORE_IRP,
	BDX_PCI_UNCORE_QPI,
	BDX_PCI_UNCORE_R2PCIE,
	BDX_PCI_UNCORE_R3QPI,
};
3204
/* NULL-terminated list of all BDX PCI-based uncore PMU types. */
static struct intel_uncore_type *bdx_pci_uncores[] = {
	[BDX_PCI_UNCORE_HA]	= &bdx_uncore_ha,
	[BDX_PCI_UNCORE_IMC]	= &bdx_uncore_imc,
	[BDX_PCI_UNCORE_IRP]	= &bdx_uncore_irp,
	[BDX_PCI_UNCORE_QPI]	= &bdx_uncore_qpi,
	[BDX_PCI_UNCORE_R2PCIE]	= &bdx_uncore_r2pcie,
	[BDX_PCI_UNCORE_R3QPI]	= &bdx_uncore_r3qpi,
	NULL,
};
3214
/*
 * PCI device IDs of the BDX uncore PMU units.  driver_data encodes
 * (uncore type index, box instance index); UNCORE_EXTRA_PCI_DEV entries
 * are auxiliary devices only read for configuration, not counted.
 */
static const struct pci_device_id bdx_uncore_pci_ids[] = {
	{ /* Home Agent 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f30),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 0),
	},
	{ /* Home Agent 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f38),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_HA, 1),
	},
	{ /* MC0 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb0),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 0),
	},
	{ /* MC0 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb1),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 1),
	},
	{ /* MC0 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb4),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 2),
	},
	{ /* MC0 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fb5),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 3),
	},
	{ /* MC1 Channel 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd0),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 4),
	},
	{ /* MC1 Channel 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd1),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 5),
	},
	{ /* MC1 Channel 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd4),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 6),
	},
	{ /* MC1 Channel 3 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fd5),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IMC, 7),
	},
	{ /* IRP */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f39),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_IRP, 0),
	},
	{ /* QPI0 Port 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f32),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 0),
	},
	{ /* QPI0 Port 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f33),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 1),
	},
	{ /* QPI1 Port 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f3a),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_QPI, 2),
	},
	{ /* R2PCIe */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f34),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R2PCIE, 0),
	},
	{ /* R3QPI0 Link 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f36),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 0),
	},
	{ /* R3QPI0 Link 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f37),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 1),
	},
	{ /* R3QPI1 Link 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f3e),
		.driver_data = UNCORE_PCI_DEV_DATA(BDX_PCI_UNCORE_R3QPI, 2),
	},
	{ /* QPI Port 0 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f86),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT0_FILTER),
	},
	{ /* QPI Port 1 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f96),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   SNBEP_PCI_QPI_PORT1_FILTER),
	},
	{ /* QPI Port 2 filter */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6f46),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   BDX_PCI_QPI_PORT2_FILTER),
	},
	{ /* PCU.3 (for Capability registers) */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x6fc0),
		.driver_data = UNCORE_PCI_DEV_DATA(UNCORE_EXTRA_PCI_DEV,
						   HSWEP_PCI_PCU_3),
	},
	{ /* end: all zeroes */ }
};
3310
/* PCI driver matching the BDX uncore devices; probing is done by the
 * generic uncore core, so only name and id_table are filled in. */
static struct pci_driver bdx_uncore_pci_driver = {
	.name		= "bdx_uncore",
	.id_table	= bdx_uncore_pci_ids,
};
3315
3316int bdx_uncore_pci_init(void)
3317{
3318 int ret = snbep_pci2phy_map_init(0x6f1e, SNBEP_CPUNODEID, SNBEP_GIDNIDMAP, true);
3319
3320 if (ret)
3321 return ret;
3322 uncore_pci_uncores = bdx_pci_uncores;
3323 uncore_pci_driver = &bdx_uncore_pci_driver;
3324 return 0;
3325}
3326
3327
3328
3329
3330
/*
 * SKX UBox MSR uncore PMU: one box with two 48-bit general counters plus
 * a 48-bit fixed (uclk) counter.  Same register layout as HSW-EP, same
 * IVB-EP ops/format group; unlike bdx_uncore_ubox it declares no shared
 * register.
 */
static struct intel_uncore_type skx_uncore_ubox = {
	.name			= "ubox",
	.num_counters		= 2,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.fixed_ctr_bits		= 48,
	.perf_ctr		= HSWEP_U_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_U_MSR_PMON_CTL0,
	.event_mask		= SNBEP_U_MSR_PMON_RAW_EVENT_MASK,
	.fixed_ctr		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTR,
	.fixed_ctl		= HSWEP_U_MSR_PMON_UCLK_FIXED_CTL,
	.ops			= &ivbep_uncore_msr_ops,
	.format_group		= &ivbep_uncore_ubox_format_group,
};
3345
/* sysfs format attributes for the SKX CHA PMU: basic event fields plus
 * the CHA filter-register fields (tid/state/remote/local/opcode/...). */
static struct attribute *skx_uncore_cha_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_tid_en.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_filter_tid4.attr,
	&format_attr_filter_state5.attr,
	&format_attr_filter_rem.attr,
	&format_attr_filter_loc.attr,
	&format_attr_filter_nm.attr,
	&format_attr_filter_all_op.attr,
	&format_attr_filter_not_nm.attr,
	&format_attr_filter_opc_0.attr,
	&format_attr_filter_opc_1.attr,
	&format_attr_filter_nc.attr,
	&format_attr_filter_isoc.attr,
	NULL,
};
3366
/* sysfs "format" directory for the SKX CHA PMU. */
static const struct attribute_group skx_uncore_chabox_format_group = {
	.name = "format",
	.attrs = skx_uncore_cha_formats_attr,
};
3371
/* SKX CHA events restricted to counter 0 (event code, counter mask). */
static struct event_constraint skx_uncore_chabox_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x11, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x36, 0x1),
	EVENT_CONSTRAINT_END
};
3377
/*
 * Events that require the CHA filter register.  Each entry maps an
 * (event|umask) pattern under config_mask to the filter-field index bits
 * (idx) consumed by skx_cha_hw_config()/skx_cha_filter_mask().
 */
static struct extra_reg skx_uncore_cha_extra_regs[] = {
	SNBEP_CBO_EVENT_EXTRA_REG(0x0334, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0534, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x0934, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x1134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x3134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x9134, 0xffff, 0x4),
	SNBEP_CBO_EVENT_EXTRA_REG(0x35, 0xff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x36, 0xff, 0x8),
	SNBEP_CBO_EVENT_EXTRA_REG(0x38, 0xff, 0x3),
	EVENT_EXTRA_END
};
3390
3391static u64 skx_cha_filter_mask(int fields)
3392{
3393 u64 mask = 0;
3394
3395 if (fields & 0x1)
3396 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_TID;
3397 if (fields & 0x2)
3398 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_LINK;
3399 if (fields & 0x4)
3400 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_STATE;
3401 if (fields & 0x8) {
3402 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_REM;
3403 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_LOC;
3404 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_ALL_OPC;
3405 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_NM;
3406 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_NOT_NM;
3407 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_OPC0;
3408 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_OPC1;
3409 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_NC;
3410 mask |= SKX_CHA_MSR_PMON_BOX_FILTER_ISOC;
3411 }
3412 return mask;
3413}
3414
/* Thin wrapper feeding the SKX filter-mask translator into the shared
 * SNB-EP CBox constraint logic. */
static struct event_constraint *
skx_cha_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	return __snbep_cbox_get_constraint(box, event, skx_cha_filter_mask);
}
3420
3421static int skx_cha_hw_config(struct intel_uncore_box *box, struct perf_event *event)
3422{
3423 struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
3424 struct extra_reg *er;
3425 int idx = 0;
3426
3427 for (er = skx_uncore_cha_extra_regs; er->msr; er++) {
3428 if (er->event != (event->hw.config & er->config_mask))
3429 continue;
3430 idx |= er->idx;
3431 }
3432
3433 if (idx) {
3434 reg1->reg = HSWEP_C0_MSR_PMON_BOX_FILTER0 +
3435 HSWEP_CBO_MSR_OFFSET * box->pmu->pmu_idx;
3436 reg1->config = event->attr.config1 & skx_cha_filter_mask(idx);
3437 reg1->idx = idx;
3438 }
3439 return 0;
3440}
3441
/* SKX CHA ops: IVB-EP/SNB-EP box control, HSW-EP event enable (which also
 * writes the filter register), plus the SKX filter config/constraints. */
static struct intel_uncore_ops skx_uncore_chabox_ops = {
	/* There is no frz_en for chabox ctl */
	.init_box		= ivbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= hswep_cbox_enable_event,
	.read_counter		= uncore_msr_read_counter,
	.hw_config		= skx_cha_hw_config,
	.get_constraint		= skx_cha_get_constraint,
	.put_constraint		= snbep_cbox_put_constraint,
};
3454
/*
 * SKX CHA MSR uncore PMU.  num_boxes is deliberately left unset here; it
 * is filled in at runtime by skx_uncore_cpu_init() from skx_count_chabox().
 * NOTE(review): event_mask reuses HSWEP_S_MSR_PMON_RAW_EVENT_MASK rather
 * than a CBO mask — looks intentional for the SKX CHA encoding, but
 * confirm against the SKX uncore manual.
 */
static struct intel_uncore_type skx_uncore_chabox = {
	.name			= "cha",
	.num_counters		= 4,
	.perf_ctr_bits		= 48,
	.event_ctl		= HSWEP_C0_MSR_PMON_CTL0,
	.perf_ctr		= HSWEP_C0_MSR_PMON_CTR0,
	.event_mask		= HSWEP_S_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_C0_MSR_PMON_BOX_CTL,
	.msr_offset		= HSWEP_CBO_MSR_OFFSET,
	.num_shared_regs	= 1,		/* shared filter register */
	.constraints		= skx_uncore_chabox_constraints,
	.ops			= &skx_uncore_chabox_ops,
	.format_group		= &skx_uncore_chabox_format_group,
};
3469
/* sysfs format attributes for the SKX IIO PMU (9-bit threshold, channel
 * and FC mask fields). */
static struct attribute *skx_uncore_iio_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh9.attr,
	&format_attr_ch_mask.attr,
	&format_attr_fc_mask.attr,
	NULL,
};
3480
/* sysfs "format" directory for the SKX IIO PMU. */
static const struct attribute_group skx_uncore_iio_format_group = {
	.name = "format",
	.attrs = skx_uncore_iio_formats_attr,
};
3485
/* SKX IIO events restricted to specific counters (event code, counter mask). */
static struct event_constraint skx_uncore_iio_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x83, 0x3),
	UNCORE_EVENT_CONSTRAINT(0x88, 0xc),
	UNCORE_EVENT_CONSTRAINT(0x95, 0xc),
	UNCORE_EVENT_CONSTRAINT(0xc0, 0xc),
	UNCORE_EVENT_CONSTRAINT(0xc5, 0xc),
	UNCORE_EVENT_CONSTRAINT(0xd4, 0xc),
	EVENT_CONSTRAINT_END
};
3495
3496static void skx_iio_enable_event(struct intel_uncore_box *box,
3497 struct perf_event *event)
3498{
3499 struct hw_perf_event *hwc = &event->hw;
3500
3501 wrmsrl(hwc->config_base, hwc->config | SNBEP_PMON_CTL_EN);
3502}
3503
/* SKX IIO ops: standard IVB-EP/SNB-EP MSR box handling with the
 * IIO-specific event enable. */
static struct intel_uncore_ops skx_uncore_iio_ops = {
	.init_box		= ivbep_uncore_msr_init_box,
	.disable_box		= snbep_uncore_msr_disable_box,
	.enable_box		= snbep_uncore_msr_enable_box,
	.disable_event		= snbep_uncore_msr_disable_event,
	.enable_event		= skx_iio_enable_event,
	.read_counter		= uncore_msr_read_counter,
};
3512
/* SKX IIO (integrated I/O stack) MSR uncore PMU: 6 stacks, four 48-bit
 * counters each, with an extended event mask for the ch/fc mask bits. */
static struct intel_uncore_type skx_uncore_iio = {
	.name			= "iio",
	.num_counters		= 4,
	.num_boxes		= 6,
	.perf_ctr_bits		= 48,
	.event_ctl		= SKX_IIO0_MSR_PMON_CTL0,
	.perf_ctr		= SKX_IIO0_MSR_PMON_CTR0,
	.event_mask		= SKX_IIO_PMON_RAW_EVENT_MASK,
	.event_mask_ext		= SKX_IIO_PMON_RAW_EVENT_MASK_EXT,
	.box_ctl		= SKX_IIO0_MSR_PMON_BOX_CTL,
	.msr_offset		= SKX_IIO_MSR_OFFSET,
	.constraints		= skx_uncore_iio_constraints,
	.ops			= &skx_uncore_iio_ops,
	.format_group		= &skx_uncore_iio_format_group,
};
3528
/* Free-running counter groups of the SKX IIO: ioclk, bandwidth and
 * utilization.  Indices into skx_iio_freerunning[]. */
enum perf_uncore_iio_freerunning_type_id {
	SKX_IIO_MSR_IOCLK			= 0,
	SKX_IIO_MSR_BW				= 1,
	SKX_IIO_MSR_UTIL			= 2,

	SKX_IIO_FREERUNNING_TYPE_MAX,
};
3536
3537
/*
 * SKX IIO free-running counter descriptions.
 * NOTE(review): field order assumed to be { counter_base MSR,
 * counter_offset, box_offset, num_counters, bits } per
 * struct freerunning_counters in uncore.h — confirm against that header.
 */
static struct freerunning_counters skx_iio_freerunning[] = {
	[SKX_IIO_MSR_IOCLK]	= { 0xa45, 0x1, 0x20, 1, 36 },
	[SKX_IIO_MSR_BW]	= { 0xb00, 0x1, 0x10, 8, 36 },
	[SKX_IIO_MSR_UTIL]	= { 0xb08, 0x1, 0x10, 8, 36 },
};
3543
/*
 * Named events for the SKX IIO free-running counters.  event=0xff marks a
 * free-running counter; umask selects type (0x1x ioclk, 0x2x bandwidth,
 * 0x3x utilization) and port.  Bandwidth counts are scaled to MiB.
 */
static struct uncore_event_desc skx_uncore_iio_freerunning_events[] = {
	/* Free-Running IO CLOCKS Counter */
	INTEL_UNCORE_EVENT_DESC(ioclk,			"event=0xff,umask=0x10"),
	/* Free-Running IIO BANDWIDTH Counters */
	INTEL_UNCORE_EVENT_DESC(bw_in_port0,		"event=0xff,umask=0x20"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port0.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port0.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port1,		"event=0xff,umask=0x21"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port1.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port1.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port2,		"event=0xff,umask=0x22"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port2.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port2.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port3,		"event=0xff,umask=0x23"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port3.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_in_port3.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port0,		"event=0xff,umask=0x24"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port0.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port0.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port1,		"event=0xff,umask=0x25"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port1.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port1.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port2,		"event=0xff,umask=0x26"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port2.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port2.unit,	"MiB"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port3,		"event=0xff,umask=0x27"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port3.scale,	"3.814697266e-6"),
	INTEL_UNCORE_EVENT_DESC(bw_out_port3.unit,	"MiB"),
	/* Free-running IIO UTILIZATION Counters */
	INTEL_UNCORE_EVENT_DESC(util_in_port0,		"event=0xff,umask=0x30"),
	INTEL_UNCORE_EVENT_DESC(util_out_port0,		"event=0xff,umask=0x31"),
	INTEL_UNCORE_EVENT_DESC(util_in_port1,		"event=0xff,umask=0x32"),
	INTEL_UNCORE_EVENT_DESC(util_out_port1,		"event=0xff,umask=0x33"),
	INTEL_UNCORE_EVENT_DESC(util_in_port2,		"event=0xff,umask=0x34"),
	INTEL_UNCORE_EVENT_DESC(util_out_port2,		"event=0xff,umask=0x35"),
	INTEL_UNCORE_EVENT_DESC(util_in_port3,		"event=0xff,umask=0x36"),
	INTEL_UNCORE_EVENT_DESC(util_out_port3,		"event=0xff,umask=0x37"),
	{ /* end marker */ },
};
3583
/* Free-running counters cannot be started/stopped or configured, so only
 * a read callback is provided. */
static struct intel_uncore_ops skx_uncore_iio_freerunning_ops = {
	.read_counter		= uncore_msr_read_counter,
};
3587
/* Only event/umask are meaningful for free-running counters. */
static struct attribute *skx_uncore_iio_freerunning_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	NULL,
};
3593
/* sysfs "format" directory for the SKX IIO free-running PMU. */
static const struct attribute_group skx_uncore_iio_freerunning_format_group = {
	.name = "format",
	.attrs = skx_uncore_iio_freerunning_formats_attr,
};
3598
/*
 * SKX IIO free-running pseudo-PMU.  num_counters (17) is the sum of the
 * counters declared in skx_iio_freerunning[]: 1 ioclk + 8 bandwidth +
 * 8 utilization.
 */
static struct intel_uncore_type skx_uncore_iio_free_running = {
	.name			= "iio_free_running",
	.num_counters		= 17,
	.num_boxes		= 6,
	.num_freerunning_types	= SKX_IIO_FREERUNNING_TYPE_MAX,
	.freerunning		= skx_iio_freerunning,
	.ops			= &skx_uncore_iio_freerunning_ops,
	.event_descs		= skx_uncore_iio_freerunning_events,
	.format_group		= &skx_uncore_iio_freerunning_format_group,
};
3609
/* Generic SKX format attributes shared by several box types (IRP, IMC,
 * M2M, M2PCIe, M3UPI). */
static struct attribute *skx_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};
3618
/* sysfs "format" directory shared by the generic SKX box types. */
static const struct attribute_group skx_uncore_format_group = {
	.name = "format",
	.attrs = skx_uncore_formats_attr,
};
3623
/* SKX IRP MSR uncore PMU: one per IIO stack (6 boxes), two 48-bit
 * counters each; shares the IIO ops. */
static struct intel_uncore_type skx_uncore_irp = {
	.name			= "irp",
	.num_counters		= 2,
	.num_boxes		= 6,
	.perf_ctr_bits		= 48,
	.event_ctl		= SKX_IRP0_MSR_PMON_CTL0,
	.perf_ctr		= SKX_IRP0_MSR_PMON_CTR0,
	.event_mask		= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl		= SKX_IRP0_MSR_PMON_BOX_CTL,
	.msr_offset		= SKX_IRP_MSR_OFFSET,
	.ops			= &skx_uncore_iio_ops,
	.format_group		= &skx_uncore_format_group,
};
3637
/* sysfs format attributes for the SKX PCU PMU: basic fields plus
 * occupancy invert/edge and the four frequency-band filters. */
static struct attribute *skx_uncore_pcu_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_occ_invert.attr,
	&format_attr_occ_edge_det.attr,
	&format_attr_filter_band0.attr,
	&format_attr_filter_band1.attr,
	&format_attr_filter_band2.attr,
	&format_attr_filter_band3.attr,
	NULL,
};
3652
3653static struct attribute_group skx_uncore_pcu_format_group = {
3654 .name = "format",
3655 .attrs = skx_uncore_pcu_formats_attr,
3656};
3657
/* SKX PCU ops: common IVB-EP MSR handling plus the HSW-EP hw_config and
 * SNB-EP shared-filter constraint management. */
static struct intel_uncore_ops skx_uncore_pcu_ops = {
	IVBEP_UNCORE_MSR_OPS_COMMON_INIT(),
	.hw_config		= hswep_pcu_hw_config,
	.get_constraint		= snbep_pcu_get_constraint,
	.put_constraint		= snbep_pcu_put_constraint,
};
3664
/* SKX PCU (power control unit) MSR uncore PMU: one box, four 48-bit
 * counters, with a shared frequency-band filter register. */
static struct intel_uncore_type skx_uncore_pcu = {
	.name			= "pcu",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.perf_ctr		= HSWEP_PCU_MSR_PMON_CTR0,
	.event_ctl		= HSWEP_PCU_MSR_PMON_CTL0,
	.event_mask		= SNBEP_PCU_MSR_PMON_RAW_EVENT_MASK,
	.box_ctl		= HSWEP_PCU_MSR_PMON_BOX_CTL,
	.num_shared_regs	= 1,		/* shared band filter register */
	.ops			= &skx_uncore_pcu_ops,
	.format_group		= &skx_uncore_pcu_format_group,
};
3678
/* NULL-terminated list of SKX MSR-based uncore PMU types, registered by
 * skx_uncore_cpu_init(). */
static struct intel_uncore_type *skx_msr_uncores[] = {
	&skx_uncore_ubox,
	&skx_uncore_chabox,
	&skx_uncore_iio,
	&skx_uncore_iio_free_running,
	&skx_uncore_irp,
	&skx_uncore_pcu,
	NULL,
};
3688
3689
3690
3691
3692
3693#define SKX_CAPID6 0x9c
3694#define SKX_CHA_BIT_MASK GENMASK(27, 0)
3695
3696static int skx_count_chabox(void)
3697{
3698 struct pci_dev *dev = NULL;
3699 u32 val = 0;
3700
3701 dev = pci_get_device(PCI_VENDOR_ID_INTEL, 0x2083, dev);
3702 if (!dev)
3703 goto out;
3704
3705 pci_read_config_dword(dev, SKX_CAPID6, &val);
3706 val &= SKX_CHA_BIT_MASK;
3707out:
3708 pci_dev_put(dev);
3709 return hweight32(val);
3710}
3711
/* Register the SKX MSR uncore PMUs; the CHA box count is probed from the
 * CAPID6 capability register at runtime. */
void skx_uncore_cpu_init(void)
{
	skx_uncore_chabox.num_boxes = skx_count_chabox();
	uncore_msr_uncores = skx_msr_uncores;
}
3717
/*
 * SKX IMC (memory controller channel) PCI uncore PMU: 6 channels, four
 * 48-bit general counters plus a 48-bit fixed (DCLK) counter per channel.
 * Reuses the HSW-EP IMC event descriptions.
 */
static struct intel_uncore_type skx_uncore_imc = {
	.name		= "imc",
	.num_counters   = 4,
	.num_boxes	= 6,
	.perf_ctr_bits	= 48,
	.fixed_ctr_bits	= 48,
	.fixed_ctr	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTR,
	.fixed_ctl	= SNBEP_MC_CHy_PCI_PMON_FIXED_CTL,
	.event_descs	= hswep_uncore_imc_events,
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,
	.event_ctl	= SNBEP_PCI_PMON_CTL0,
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,
	.ops		= &ivbep_uncore_pci_ops,
	.format_group	= &skx_uncore_format_group,
};
3734
/* sysfs format attributes for the SKX UPI PMU; umask_ext exposes the
 * extended umask bits (SKX_UPI_CTL_UMASK_EXT). */
static struct attribute *skx_upi_uncore_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask_ext.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};
3743
/* sysfs "format" directory for the SKX UPI PMU. */
static const struct attribute_group skx_upi_uncore_format_group = {
	.name = "format",
	.attrs = skx_upi_uncore_formats_attr,
};
3748
3749static void skx_upi_uncore_pci_init_box(struct intel_uncore_box *box)
3750{
3751 struct pci_dev *pdev = box->pci_dev;
3752
3753 __set_bit(UNCORE_BOX_FLAG_CTL_OFFS8, &box->flags);
3754 pci_write_config_dword(pdev, SKX_UPI_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT);
3755}
3756
/* SKX UPI PCI ops: standard SNB-EP PCI handling with the UPI-specific
 * box init. */
static struct intel_uncore_ops skx_upi_uncore_pci_ops = {
	.init_box	= skx_upi_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_uncore_pci_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
};
3765
/* SKX UPI link PCI uncore PMU: 3 links, four 48-bit counters, with
 * extended umask bits (SKX_UPI_CTL_UMASK_EXT). */
static struct intel_uncore_type skx_uncore_upi = {
	.name		= "upi",
	.num_counters   = 4,
	.num_boxes	= 3,
	.perf_ctr_bits	= 48,
	.perf_ctr	= SKX_UPI_PCI_PMON_CTR0,
	.event_ctl	= SKX_UPI_PCI_PMON_CTL0,
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,
	.event_mask_ext = SKX_UPI_CTL_UMASK_EXT,
	.box_ctl	= SKX_UPI_PCI_PMON_BOX_CTL,
	.ops		= &skx_upi_uncore_pci_ops,
	.format_group	= &skx_upi_uncore_format_group,
};
3779
3780static void skx_m2m_uncore_pci_init_box(struct intel_uncore_box *box)
3781{
3782 struct pci_dev *pdev = box->pci_dev;
3783
3784 __set_bit(UNCORE_BOX_FLAG_CTL_OFFS8, &box->flags);
3785 pci_write_config_dword(pdev, SKX_M2M_PCI_PMON_BOX_CTL, IVBEP_PMON_BOX_CTL_INT);
3786}
3787
/* SKX M2M PCI ops: standard SNB-EP PCI handling with the M2M-specific
 * box init. */
static struct intel_uncore_ops skx_m2m_uncore_pci_ops = {
	.init_box	= skx_m2m_uncore_pci_init_box,
	.disable_box	= snbep_uncore_pci_disable_box,
	.enable_box	= snbep_uncore_pci_enable_box,
	.disable_event	= snbep_uncore_pci_disable_event,
	.enable_event	= snbep_uncore_pci_enable_event,
	.read_counter	= snbep_uncore_pci_read_counter,
};
3796
/* SKX M2M (mesh-to-memory) PCI uncore PMU: 2 boxes, four 48-bit counters. */
static struct intel_uncore_type skx_uncore_m2m = {
	.name		= "m2m",
	.num_counters   = 4,
	.num_boxes	= 2,
	.perf_ctr_bits	= 48,
	.perf_ctr	= SKX_M2M_PCI_PMON_CTR0,
	.event_ctl	= SKX_M2M_PCI_PMON_CTL0,
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl	= SKX_M2M_PCI_PMON_BOX_CTL,
	.ops		= &skx_m2m_uncore_pci_ops,
	.format_group	= &skx_uncore_format_group,
};
3809
/* SKX M2PCIe event 0x23 restricted to counters 0-1. */
static struct event_constraint skx_uncore_m2pcie_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x23, 0x3),
	EVENT_CONSTRAINT_END
};
3814
/* SKX M2PCIe (mesh-to-PCIe) PCI uncore PMU: 4 boxes, four 48-bit counters. */
static struct intel_uncore_type skx_uncore_m2pcie = {
	.name		= "m2pcie",
	.num_counters   = 4,
	.num_boxes	= 4,
	.perf_ctr_bits	= 48,
	.constraints	= skx_uncore_m2pcie_constraints,
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,
	.event_ctl	= SNBEP_PCI_PMON_CTL0,
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,
	.ops		= &ivbep_uncore_pci_ops,
	.format_group	= &skx_uncore_format_group,
};
3828
/* SKX M3UPI events restricted to specific counters (event code, counter mask). */
static struct event_constraint skx_uncore_m3upi_constraints[] = {
	UNCORE_EVENT_CONSTRAINT(0x1d, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x1e, 0x1),
	UNCORE_EVENT_CONSTRAINT(0x40, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x4e, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x4f, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x50, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x51, 0x7),
	UNCORE_EVENT_CONSTRAINT(0x52, 0x7),
	EVENT_CONSTRAINT_END
};
3840
/* SKX M3UPI (mesh-to-UPI) PCI uncore PMU: 3 boxes, three 48-bit counters. */
static struct intel_uncore_type skx_uncore_m3upi = {
	.name		= "m3upi",
	.num_counters   = 3,
	.num_boxes	= 3,
	.perf_ctr_bits	= 48,
	.constraints	= skx_uncore_m3upi_constraints,
	.perf_ctr	= SNBEP_PCI_PMON_CTR0,
	.event_ctl	= SNBEP_PCI_PMON_CTL0,
	.event_mask	= SNBEP_PMON_RAW_EVENT_MASK,
	.box_ctl	= SNBEP_PCI_PMON_BOX_CTL,
	.ops		= &ivbep_uncore_pci_ops,
	.format_group	= &skx_uncore_format_group,
};
3854
/*
 * Indices into skx_pci_uncores[]; also encoded into the driver_data of
 * each skx_uncore_pci_ids[] entry.
 */
enum {
	SKX_PCI_UNCORE_IMC,
	SKX_PCI_UNCORE_M2M,
	SKX_PCI_UNCORE_UPI,
	SKX_PCI_UNCORE_M2PCIE,
	SKX_PCI_UNCORE_M3UPI,
};
3862
/* NULL-terminated list of all SKX PCI-based uncore PMU types. */
static struct intel_uncore_type *skx_pci_uncores[] = {
	[SKX_PCI_UNCORE_IMC]	= &skx_uncore_imc,
	[SKX_PCI_UNCORE_M2M]	= &skx_uncore_m2m,
	[SKX_PCI_UNCORE_UPI]	= &skx_uncore_upi,
	[SKX_PCI_UNCORE_M2PCIE]	= &skx_uncore_m2pcie,
	[SKX_PCI_UNCORE_M3UPI]	= &skx_uncore_m3upi,
	NULL,
};
3871
/*
 * PCI ids of the SKX uncore devices. Several devices share a PCI device
 * id (e.g. 0x2042 for two IMC channels), so UNCORE_PCI_DEV_FULL_DATA()
 * additionally encodes the expected PCI device/function number plus the
 * uncore type and per-type box index, letting the uncore core tell the
 * instances apart.
 */
static const struct pci_device_id skx_uncore_pci_ids[] = {
	{ /* IMC box 0: dev 10, func 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2042),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(10, 2, SKX_PCI_UNCORE_IMC, 0),
	},
	{ /* IMC box 1: dev 10, func 6 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2046),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(10, 6, SKX_PCI_UNCORE_IMC, 1),
	},
	{ /* IMC box 2: dev 11, func 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x204a),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(11, 2, SKX_PCI_UNCORE_IMC, 2),
	},
	{ /* IMC box 3: dev 12, func 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2042),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(12, 2, SKX_PCI_UNCORE_IMC, 3),
	},
	{ /* IMC box 4: dev 12, func 6 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2046),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(12, 6, SKX_PCI_UNCORE_IMC, 4),
	},
	{ /* IMC box 5: dev 13, func 2 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x204a),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(13, 2, SKX_PCI_UNCORE_IMC, 5),
	},
	{ /* M2M box 0: dev 8, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2066),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(8, 0, SKX_PCI_UNCORE_M2M, 0),
	},
	{ /* M2M box 1: dev 9, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2066),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(9, 0, SKX_PCI_UNCORE_M2M, 1),
	},
	{ /* UPI box 0: dev 14, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2058),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(14, 0, SKX_PCI_UNCORE_UPI, 0),
	},
	{ /* UPI box 1: dev 15, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2058),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(15, 0, SKX_PCI_UNCORE_UPI, 1),
	},
	{ /* UPI box 2: dev 16, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2058),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(16, 0, SKX_PCI_UNCORE_UPI, 2),
	},
	{ /* M2PCIe box 0: dev 21, func 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2088),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(21, 1, SKX_PCI_UNCORE_M2PCIE, 0),
	},
	{ /* M2PCIe box 1: dev 22, func 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2088),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(22, 1, SKX_PCI_UNCORE_M2PCIE, 1),
	},
	{ /* M2PCIe box 2: dev 23, func 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2088),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(23, 1, SKX_PCI_UNCORE_M2PCIE, 2),
	},
	{ /* M2PCIe box 3: dev 21, func 5 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x2088),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(21, 5, SKX_PCI_UNCORE_M2PCIE, 3),
	},
	{ /* M3UPI box 0: dev 18, func 0 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x204C),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(18, 0, SKX_PCI_UNCORE_M3UPI, 0),
	},
	{ /* M3UPI box 1: dev 18, func 1 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x204D),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(18, 1, SKX_PCI_UNCORE_M3UPI, 1),
	},
	{ /* M3UPI box 2: dev 18, func 4 */
		PCI_DEVICE(PCI_VENDOR_ID_INTEL, 0x204C),
		.driver_data = UNCORE_PCI_DEV_FULL_DATA(18, 4, SKX_PCI_UNCORE_M3UPI, 2),
	},
	{ /* end: all zeroes */ }
};
3947
3948
/*
 * PCI driver handed to the generic uncore core via uncore_pci_driver in
 * skx_uncore_pci_init(); only the name and id table are supplied here.
 * NOTE(review): no .probe/.remove — presumably filled in/handled by the
 * common uncore PCI code; confirm against the core before relying on it.
 */
static struct pci_driver skx_uncore_pci_driver = {
	.name		= "skx_uncore",
	.id_table	= skx_uncore_pci_ids,
};
3953
3954int skx_uncore_pci_init(void)
3955{
3956
3957 int ret = snbep_pci2phy_map_init(0x2014, SKX_CPUNODEID, SKX_GIDNIDMAP, false);
3958
3959 if (ret)
3960 return ret;
3961
3962 uncore_pci_uncores = skx_pci_uncores;
3963 uncore_pci_driver = &skx_uncore_pci_driver;
3964 return 0;
3965}
3966
3967
3968