#include "i915_drv.h"
#include "gvt.h"

enum {
	INTEL_GVT_PCI_BAR_GTTMMIO = 0,
	INTEL_GVT_PCI_BAR_APERTURE,
	INTEL_GVT_PCI_BAR_PIO,
	INTEL_GVT_PCI_BAR_MAX,
};
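
/*
 * Bitmap of writable bits (RW or RW1C) for each byte of the standard PCI
 * configuration space. Bytes beyond the end of the bitmap (capability and
 * vendor-specific space) are written through unchanged.
 */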
static const u8 pci_cfg_space_rw_bmp[PCI_INTERRUPT_LINE + 4] = {
	[PCI_COMMAND]		= 0xff, 0x07,
	[PCI_STATUS]		= 0x00, 0xf9,
	[PCI_CACHE_LINE_SIZE]	= 0xff,
	[PCI_BASE_ADDRESS_0 ... PCI_CARDBUS_CIS - 1] = 0xff,
	[PCI_ROM_ADDRESS]	= 0x01, 0xf8, 0xff, 0xff,
	[PCI_INTERRUPT_LINE]	= 0xff,
};
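
/*
 * vgpu_pci_cfg_mem_write - write virtual cfg space memory
 * @vgpu: target vgpu
 * @off: offset into the virtual cfg space
 * @src: src ptr to write
 * @bytes: number of bytes to write
 *
 * For the standard cfg space covered by pci_cfg_space_rw_bmp, only the
 * writable bits are changed and the RW1C semantics of PCI_STATUS are
 * emulated; the rest of the cfg space is written through directly.
 */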
static void vgpu_pci_cfg_mem_write(struct intel_vgpu *vgpu, unsigned int off,
				   u8 *src, unsigned int bytes)
{
	u8 *cfg_base = vgpu_cfg_space(vgpu);
	u8 mask, new, old;
	pci_power_t pwr;
	int i = 0;

	for (; i < bytes && (off + i < sizeof(pci_cfg_space_rw_bmp)); i++) {
		mask = pci_cfg_space_rw_bmp[off + i];
		old = cfg_base[off + i];
		new = src[i] & mask;

		/*
		 * The high byte of PCI_STATUS contains RW1C bits: writing
		 * a 1 clears the bit, so emulate clear-by-write here.
		 */
		if (off + i == PCI_STATUS + 1)
			new = (~new & old) & mask;

		cfg_base[off + i] = (old & ~mask) | new;
	}

	/* Bytes outside the bitmap are copied through as-is. */
	if (i < bytes)
		memcpy(cfg_base + off + i, src + i, bytes - i);

	/* Track guest power-state transitions through the PMCSR register. */
	if (off == vgpu->cfg_space.pmcsr_off && vgpu->cfg_space.pmcsr_off) {
		pwr = (pci_power_t __force)(*(u16 *)(&vgpu_cfg_space(vgpu)[off])
			& PCI_PM_CTRL_STATE_MASK);
		if (pwr == PCI_D3hot)
			vgpu->d3_entered = true;
		gvt_dbg_core("vgpu-%d power status changed to %d\n",
			     vgpu->id, pwr);
	}
}
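
/**
 * intel_vgpu_emulate_cfg_read - emulate vGPU configuration space read
 * @vgpu: target vgpu
 * @offset: offset into the cfg space
 * @p_data: return data ptr
 * @bytes: number of bytes to read
 *
 * Returns:
 * Zero on success, negative error code if failed.
 */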
int intel_vgpu_emulate_cfg_read(struct intel_vgpu *vgpu, unsigned int offset,
				void *p_data, unsigned int bytes)
{
	struct drm_i915_private *i915 = vgpu->gvt->gt->i915;

	if (drm_WARN_ON(&i915->drm, bytes > 4))
		return -EINVAL;

	if (drm_WARN_ON(&i915->drm,
			offset + bytes > vgpu->gvt->device_info.cfg_space_size))
		return -EINVAL;

	memcpy(p_data, vgpu_cfg_space(vgpu) + offset, bytes);
	return 0;
}
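
/*
 * Map or unmap the guest aperture region (behind BAR2) onto the host
 * aperture pages via the hypervisor, tracking the current state so
 * redundant calls become no-ops.
 */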
static int map_aperture(struct intel_vgpu *vgpu, bool map)
{
	phys_addr_t aperture_pa = vgpu_aperture_pa_base(vgpu);
	unsigned long aperture_sz = vgpu_aperture_sz(vgpu);
	u64 first_gfn;
	u64 val;
	int ret;

	if (map == vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked)
		return 0;

	val = vgpu_cfg_space(vgpu)[PCI_BASE_ADDRESS_2];
	if (val & PCI_BASE_ADDRESS_MEM_TYPE_64)
		val = *(u64 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_2);
	else
		val = *(u32 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_2);

	first_gfn = (val + vgpu_aperture_offset(vgpu)) >> PAGE_SHIFT;

	ret = intel_gvt_hypervisor_map_gfn_to_mfn(vgpu, first_gfn,
						  aperture_pa >> PAGE_SHIFT,
						  aperture_sz >> PAGE_SHIFT,
						  map);
	if (ret)
		return ret;

	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].tracked = map;
	return 0;
}
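
/*
 * Set up or tear down the hypervisor trap on the GTT/MMIO region (BAR0)
 * so guest accesses fault into the device model, again tracking the
 * current state to avoid redundant hypervisor calls.
 */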
static int trap_gttmmio(struct intel_vgpu *vgpu, bool trap)
{
	u64 start, end;
	u64 val;
	int ret;

	if (trap == vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked)
		return 0;

	val = vgpu_cfg_space(vgpu)[PCI_BASE_ADDRESS_0];
	if (val & PCI_BASE_ADDRESS_MEM_TYPE_64)
		start = *(u64 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_0);
	else
		start = *(u32 *)(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_0);

	start &= ~GENMASK(3, 0);
	end = start + vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].size - 1;

	ret = intel_gvt_hypervisor_set_trap_area(vgpu, start, end, trap);
	if (ret)
		return ret;

	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].tracked = trap;
	return 0;
}
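
/*
 * A write to PCI_COMMAND may toggle PCI_COMMAND_MEMORY; mirror that by
 * enabling or disabling the GTT/MMIO trap and the aperture mapping.
 */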
static int emulate_pci_command_write(struct intel_vgpu *vgpu,
	unsigned int offset, void *p_data, unsigned int bytes)
{
	u8 old = vgpu_cfg_space(vgpu)[offset];
	u8 new = *(u8 *)p_data;
	u8 changed = old ^ new;
	int ret;

	vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
	if (!(changed & PCI_COMMAND_MEMORY))
		return 0;

	if (old & PCI_COMMAND_MEMORY) {
		ret = trap_gttmmio(vgpu, false);
		if (ret)
			return ret;
		ret = map_aperture(vgpu, false);
		if (ret)
			return ret;
	} else {
		ret = trap_gttmmio(vgpu, true);
		if (ret)
			return ret;
		ret = map_aperture(vgpu, true);
		if (ret)
			return ret;
	}

	return 0;
}
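
/*
 * The vGPU exposes no option ROM: a sizing write (all 1s in the address
 * field) must read back as 0, and any other value is stored normally.
 */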
static int emulate_pci_rom_bar_write(struct intel_vgpu *vgpu,
	unsigned int offset, void *p_data, unsigned int bytes)
{
	u32 *pval = (u32 *)(vgpu_cfg_space(vgpu) + offset);
	u32 new = *(u32 *)(p_data);

	if ((new & PCI_ROM_ADDRESS_MASK) == PCI_ROM_ADDRESS_MASK)
		/* We don't have a ROM, so return a size of 0. */
		*pval = 0;
	else
		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
	return 0;
}
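
/*
 * Handle a write to one of the memory BARs: distinguish the BAR sizing
 * handshake (writing all 1s) from an address assignment, and keep the
 * hypervisor trap/mapping state in sync with the new address.
 */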
static int emulate_pci_bar_write(struct intel_vgpu *vgpu, unsigned int offset,
	void *p_data, unsigned int bytes)
{
	u32 new = *(u32 *)(p_data);
	bool lo = IS_ALIGNED(offset, 8);
	u64 size;
	int ret = 0;
	bool mmio_enabled =
		vgpu_cfg_space(vgpu)[PCI_COMMAND] & PCI_COMMAND_MEMORY;
	struct intel_vgpu_pci_bar *bars = vgpu->cfg_space.bar;

	/*
	 * Power-up software can determine how much address
	 * space the device requires by writing a value of
	 * all 1's to the register and then reading the value
	 * back. The device will return 0's in all don't-care
	 * address bits.
	 */
	if (new == 0xffffffff) {
		switch (offset) {
		case PCI_BASE_ADDRESS_0:
		case PCI_BASE_ADDRESS_1:
			size = ~(bars[INTEL_GVT_PCI_BAR_GTTMMIO].size - 1);
			intel_vgpu_write_pci_bar(vgpu, offset,
						 size >> (lo ? 0 : 32), lo);
			/*
			 * Untrap the BAR, since the guest hasn't configured
			 * a valid GPA yet.
			 */
			ret = trap_gttmmio(vgpu, false);
			break;
		case PCI_BASE_ADDRESS_2:
		case PCI_BASE_ADDRESS_3:
			size = ~(bars[INTEL_GVT_PCI_BAR_APERTURE].size - 1);
			intel_vgpu_write_pci_bar(vgpu, offset,
						 size >> (lo ? 0 : 32), lo);
			ret = map_aperture(vgpu, false);
			break;
		default:
			/* Unimplemented BARs */
			intel_vgpu_write_pci_bar(vgpu, offset, 0x0, false);
		}
	} else {
		switch (offset) {
		case PCI_BASE_ADDRESS_0:
		case PCI_BASE_ADDRESS_1:
			/*
			 * Untrap the old BAR first, since the guest has
			 * re-configured the BAR.
			 */
			trap_gttmmio(vgpu, false);
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
			ret = trap_gttmmio(vgpu, mmio_enabled);
			break;
		case PCI_BASE_ADDRESS_2:
		case PCI_BASE_ADDRESS_3:
			map_aperture(vgpu, false);
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
			ret = map_aperture(vgpu, mmio_enabled);
			break;
		default:
			intel_vgpu_write_pci_bar(vgpu, offset, new, lo);
		}
	}
	return ret;
}
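
/**
 * intel_vgpu_emulate_cfg_write - emulate vGPU configuration space write
 * @vgpu: target vgpu
 * @offset: offset into the cfg space
 * @p_data: write data ptr
 * @bytes: number of bytes to write
 *
 * Returns:
 * Zero on success, negative error code if failed.
 */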
int intel_vgpu_emulate_cfg_write(struct intel_vgpu *vgpu, unsigned int offset,
				 void *p_data, unsigned int bytes)
{
	struct drm_i915_private *i915 = vgpu->gvt->gt->i915;
	int ret;

	if (drm_WARN_ON(&i915->drm, bytes > 4))
		return -EINVAL;

	if (drm_WARN_ON(&i915->drm,
			offset + bytes > vgpu->gvt->device_info.cfg_space_size))
		return -EINVAL;

	/* First check if it's PCI_COMMAND */
	if (IS_ALIGNED(offset, 2) && offset == PCI_COMMAND) {
		if (drm_WARN_ON(&i915->drm, bytes > 2))
			return -EINVAL;
		return emulate_pci_command_write(vgpu, offset, p_data, bytes);
	}

	switch (rounddown(offset, 4)) {
	case PCI_ROM_ADDRESS:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		return emulate_pci_rom_bar_write(vgpu, offset, p_data, bytes);

	case PCI_BASE_ADDRESS_0 ... PCI_BASE_ADDRESS_5:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		return emulate_pci_bar_write(vgpu, offset, p_data, bytes);

	case INTEL_GVT_PCI_SWSCI:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		ret = intel_vgpu_emulate_opregion_request(vgpu, *(u32 *)p_data);
		if (ret)
			return ret;
		break;

	case INTEL_GVT_PCI_OPREGION:
		if (drm_WARN_ON(&i915->drm, !IS_ALIGNED(offset, 4)))
			return -EINVAL;
		ret = intel_vgpu_opregion_base_write_handler(vgpu,
							     *(u32 *)p_data);
		if (ret)
			return ret;

		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
		break;
	default:
		vgpu_pci_cfg_mem_write(vgpu, offset, p_data, bytes);
		break;
	}
	return 0;
}
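
/**
 * intel_vgpu_init_cfg_space - init vGPU configuration space when creating
 * a vGPU
 * @vgpu: a vGPU
 * @primary: is the vGPU presented as the primary display adapter
 */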
void intel_vgpu_init_cfg_space(struct intel_vgpu *vgpu,
			       bool primary)
{
	struct intel_gvt *gvt = vgpu->gvt;
	struct pci_dev *pdev = to_pci_dev(gvt->gt->i915->drm.dev);
	const struct intel_gvt_device_info *info = &gvt->device_info;
	u16 *gmch_ctl;
	u8 next;

	memcpy(vgpu_cfg_space(vgpu), gvt->firmware.cfg_space,
	       info->cfg_space_size);

	if (!primary) {
		vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] =
			INTEL_GVT_PCI_CLASS_VGA_OTHER;
		vgpu_cfg_space(vgpu)[PCI_CLASS_PROG] =
			INTEL_GVT_PCI_CLASS_VGA_OTHER;
	}

	/* Show guest that there isn't any stolen memory. */
	gmch_ctl = (u16 *)(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_GMCH_CONTROL);
	*gmch_ctl &= ~(BDW_GMCH_GMS_MASK << BDW_GMCH_GMS_SHIFT);

	intel_vgpu_write_pci_bar(vgpu, PCI_BASE_ADDRESS_2,
				 gvt_aperture_pa_base(gvt), true);

	vgpu_cfg_space(vgpu)[PCI_COMMAND] &= ~(PCI_COMMAND_IO
					     | PCI_COMMAND_MEMORY
					     | PCI_COMMAND_MASTER);

	/* Clear the upper 32 bits of the BARs and let the guest assign them. */
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_1, 0, 4);
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_3, 0, 4);
	memset(vgpu_cfg_space(vgpu) + PCI_BASE_ADDRESS_4, 0, 8);
	memset(vgpu_cfg_space(vgpu) + INTEL_GVT_PCI_OPREGION, 0, 4);

	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_GTTMMIO].size =
		pci_resource_len(pdev, 0);
	vgpu->cfg_space.bar[INTEL_GVT_PCI_BAR_APERTURE].size =
		pci_resource_len(pdev, 2);

	memset(vgpu_cfg_space(vgpu) + PCI_ROM_ADDRESS, 0, 4);

	/* PM support: locate the PMCSR offset by walking the capability list. */
	vgpu->cfg_space.pmcsr_off = 0;
	if (vgpu_cfg_space(vgpu)[PCI_STATUS] & PCI_STATUS_CAP_LIST) {
		next = vgpu_cfg_space(vgpu)[PCI_CAPABILITY_LIST];
		do {
			if (vgpu_cfg_space(vgpu)[next + PCI_CAP_LIST_ID] == PCI_CAP_ID_PM) {
				vgpu->cfg_space.pmcsr_off = next + PCI_PM_CTRL;
				break;
			}
			next = vgpu_cfg_space(vgpu)[next + PCI_CAP_LIST_NEXT];
		} while (next);
	}
}
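
/**
 * intel_vgpu_reset_cfg_space - reset vGPU configuration space
 * @vgpu: a vGPU
 */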
void intel_vgpu_reset_cfg_space(struct intel_vgpu *vgpu)
{
	u8 cmd = vgpu_cfg_space(vgpu)[PCI_COMMAND];
	bool primary = vgpu_cfg_space(vgpu)[PCI_CLASS_DEVICE] !=
				INTEL_GVT_PCI_CLASS_VGA_OTHER;

	if (cmd & PCI_COMMAND_MEMORY) {
		trap_gttmmio(vgpu, false);
		map_aperture(vgpu, false);
	}

	/*
	 * Currently we only do such reset when the vGPU is not
	 * owned by any VM, so we simply restore the entire cfg
	 * space to its default values.
	 */
	intel_vgpu_init_cfg_space(vgpu, primary);
}