1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30#include <linux/acpi.h>
31#include <linux/device.h>
32#include <linux/module.h>
33#include <linux/oom.h>
34#include <linux/pci.h>
35#include <linux/pm.h>
36#include <linux/pm_runtime.h>
37#include <linux/pnp.h>
38#include <linux/slab.h>
39#include <linux/string_helpers.h>
40#include <linux/vga_switcheroo.h>
41#include <linux/vt.h>
42
43#include <drm/drm_aperture.h>
44#include <drm/drm_atomic_helper.h>
45#include <drm/drm_ioctl.h>
46#include <drm/drm_managed.h>
47#include <drm/drm_probe_helper.h>
48
49#include "display/intel_acpi.h"
50#include "display/intel_bw.h"
51#include "display/intel_cdclk.h"
52#include "display/intel_display_types.h"
53#include "display/intel_dmc.h"
54#include "display/intel_dp.h"
55#include "display/intel_dpt.h"
56#include "display/intel_fbdev.h"
57#include "display/intel_hotplug.h"
58#include "display/intel_overlay.h"
59#include "display/intel_pch_refclk.h"
60#include "display/intel_pipe_crc.h"
61#include "display/intel_pps.h"
62#include "display/intel_sprite.h"
63#include "display/intel_vga.h"
64
65#include "gem/i915_gem_context.h"
66#include "gem/i915_gem_create.h"
67#include "gem/i915_gem_dmabuf.h"
68#include "gem/i915_gem_ioctls.h"
69#include "gem/i915_gem_mman.h"
70#include "gem/i915_gem_pm.h"
71#include "gt/intel_gt.h"
72#include "gt/intel_gt_pm.h"
73#include "gt/intel_rc6.h"
74
75#include "pxp/intel_pxp_pm.h"
76
77#include "i915_file_private.h"
78#include "i915_debugfs.h"
79#include "i915_driver.h"
80#include "i915_drm_client.h"
81#include "i915_drv.h"
82#include "i915_getparam.h"
83#include "i915_ioc32.h"
84#include "i915_ioctl.h"
85#include "i915_irq.h"
86#include "i915_memcpy.h"
87#include "i915_perf.h"
88#include "i915_query.h"
89#include "i915_suspend.h"
90#include "i915_switcheroo.h"
91#include "i915_sysfs.h"
92#include "i915_utils.h"
93#include "i915_vgpu.h"
94#include "intel_dram.h"
95#include "intel_gvt.h"
96#include "intel_memory_region.h"
97#include "intel_pci_config.h"
98#include "intel_pcode.h"
99#include "intel_pm.h"
100#include "intel_region_ttm.h"
101#include "vlv_suspend.h"
102
103static const struct drm_driver i915_drm_driver;
104
105static int i915_get_bridge_dev(struct drm_i915_private *dev_priv)
106{
107 int domain = pci_domain_nr(to_pci_dev(dev_priv->drm.dev)->bus);
108
109 dev_priv->bridge_dev =
110 pci_get_domain_bus_and_slot(domain, 0, PCI_DEVFN(0, 0));
111 if (!dev_priv->bridge_dev) {
112 drm_err(&dev_priv->drm, "bridge device not found\n");
113 return -EIO;
114 }
115 return 0;
116}
117
118
/*
 * Allocate an address range for the MCHBAR (memory controller BAR living in
 * the host bridge's config space) and program it into the bridge.
 *
 * Returns 0 on success -- including the case where firmware has already
 * reserved the range via PNP -- or a negative error code if no suitable bus
 * range could be allocated.
 */
static int
intel_alloc_mchbar_resource(struct drm_i915_private *dev_priv)
{
	/* Gen4+ uses a 64-bit MCHBAR at a different config-space offset. */
	int reg = GRAPHICS_VER(dev_priv) >= 4 ? MCHBAR_I965 : MCHBAR_I915;
	u32 temp_lo, temp_hi = 0;
	u64 mchbar_addr;
	int ret;

	/* Read the current (possibly firmware-programmed) BAR address. */
	if (GRAPHICS_VER(dev_priv) >= 4)
		pci_read_config_dword(dev_priv->bridge_dev, reg + 4, &temp_hi);
	pci_read_config_dword(dev_priv->bridge_dev, reg, &temp_lo);
	mchbar_addr = ((u64)temp_hi << 32) | temp_lo;

	/* If firmware already reserved the range via PNP, nothing to do. */
#ifdef CONFIG_PNP
	if (mchbar_addr &&
	    pnp_range_reserved(mchbar_addr, mchbar_addr + MCHBAR_SIZE))
		return 0;
#endif

	/* Otherwise allocate a naturally-aligned MCHBAR_SIZE range ourselves. */
	dev_priv->mch_res.name = "i915 MCHBAR";
	dev_priv->mch_res.flags = IORESOURCE_MEM;
	ret = pci_bus_alloc_resource(dev_priv->bridge_dev->bus,
				     &dev_priv->mch_res,
				     MCHBAR_SIZE, MCHBAR_SIZE,
				     PCIBIOS_MIN_MEM,
				     0, pcibios_align_resource,
				     dev_priv->bridge_dev);
	if (ret) {
		drm_dbg(&dev_priv->drm, "failed bus alloc: %d\n", ret);
		dev_priv->mch_res.start = 0;
		return ret;
	}

	/* Program the newly allocated address into the bridge. */
	if (GRAPHICS_VER(dev_priv) >= 4)
		pci_write_config_dword(dev_priv->bridge_dev, reg + 4,
				       upper_32_bits(dev_priv->mch_res.start));

	pci_write_config_dword(dev_priv->bridge_dev, reg,
			       lower_32_bits(dev_priv->mch_res.start));
	return 0;
}
162
163
/*
 * Enable the MCHBAR if firmware left it disabled, allocating an address
 * range for it first. mchbar_need_disable records whether we enabled it
 * so that intel_teardown_mchbar() can undo the change. VLV/CHV are
 * skipped entirely (no MCHBAR handling there).
 */
static void
intel_setup_mchbar(struct drm_i915_private *dev_priv)
{
	int mchbar_reg = GRAPHICS_VER(dev_priv) >= 4 ? MCHBAR_I965 : MCHBAR_I915;
	u32 temp;
	bool enabled;

	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
		return;

	dev_priv->mchbar_need_disable = false;

	/*
	 * On 915G/915GM the enable bit lives in the DEVEN register;
	 * elsewhere it is bit 0 of the MCHBAR register itself.
	 */
	if (IS_I915G(dev_priv) || IS_I915GM(dev_priv)) {
		pci_read_config_dword(dev_priv->bridge_dev, DEVEN, &temp);
		enabled = !!(temp & DEVEN_MCHBAR_EN);
	} else {
		pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
		enabled = temp & 1;
	}

	/* Already enabled by firmware -- leave it alone. */
	if (enabled)
		return;

	if (intel_alloc_mchbar_resource(dev_priv))
		return;

	dev_priv->mchbar_need_disable = true;

	/* Space is allocated and programmed; set the enable bit. */
	if (IS_I915G(dev_priv) || IS_I915GM(dev_priv)) {
		pci_write_config_dword(dev_priv->bridge_dev, DEVEN,
				       temp | DEVEN_MCHBAR_EN);
	} else {
		pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg, &temp);
		pci_write_config_dword(dev_priv->bridge_dev, mchbar_reg, temp | 1);
	}
}
202
/*
 * Undo intel_setup_mchbar(): clear the MCHBAR enable bit if we were the
 * ones who set it, then release any bus resource we allocated for it.
 */
static void
intel_teardown_mchbar(struct drm_i915_private *dev_priv)
{
	int mchbar_reg = GRAPHICS_VER(dev_priv) >= 4 ? MCHBAR_I965 : MCHBAR_I915;

	if (dev_priv->mchbar_need_disable) {
		if (IS_I915G(dev_priv) || IS_I915GM(dev_priv)) {
			u32 deven_val;

			/* 915G/GM: enable bit is DEVEN_MCHBAR_EN in DEVEN. */
			pci_read_config_dword(dev_priv->bridge_dev, DEVEN,
					      &deven_val);
			deven_val &= ~DEVEN_MCHBAR_EN;
			pci_write_config_dword(dev_priv->bridge_dev, DEVEN,
					       deven_val);
		} else {
			u32 mchbar_val;

			/* Elsewhere: enable bit is bit 0 of MCHBAR itself. */
			pci_read_config_dword(dev_priv->bridge_dev, mchbar_reg,
					      &mchbar_val);
			mchbar_val &= ~1;
			pci_write_config_dword(dev_priv->bridge_dev, mchbar_reg,
					       mchbar_val);
		}
	}

	/* Only set if intel_alloc_mchbar_resource() allocated a range. */
	if (dev_priv->mch_res.start)
		release_resource(&dev_priv->mch_res);
}
231
232static int i915_workqueues_init(struct drm_i915_private *dev_priv)
233{
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248 dev_priv->wq = alloc_ordered_workqueue("i915", 0);
249 if (dev_priv->wq == NULL)
250 goto out_err;
251
252 dev_priv->hotplug.dp_wq = alloc_ordered_workqueue("i915-dp", 0);
253 if (dev_priv->hotplug.dp_wq == NULL)
254 goto out_free_wq;
255
256 return 0;
257
258out_free_wq:
259 destroy_workqueue(dev_priv->wq);
260out_err:
261 drm_err(&dev_priv->drm, "Failed to allocate workqueues.\n");
262
263 return -ENOMEM;
264}
265
/* Destroy the workqueues created by i915_workqueues_init(), in reverse order. */
static void i915_workqueues_cleanup(struct drm_i915_private *dev_priv)
{
	destroy_workqueue(dev_priv->hotplug.dp_wq);
	destroy_workqueue(dev_priv->wq);
}
271
272
273
274
275
276
277
278
279
280
281
282static void intel_detect_preproduction_hw(struct drm_i915_private *dev_priv)
283{
284 bool pre = false;
285
286 pre |= IS_HSW_EARLY_SDV(dev_priv);
287 pre |= IS_SKYLAKE(dev_priv) && INTEL_REVID(dev_priv) < 0x6;
288 pre |= IS_BROXTON(dev_priv) && INTEL_REVID(dev_priv) < 0xA;
289 pre |= IS_KABYLAKE(dev_priv) && INTEL_REVID(dev_priv) < 0x1;
290 pre |= IS_GEMINILAKE(dev_priv) && INTEL_REVID(dev_priv) < 0x3;
291 pre |= IS_ICELAKE(dev_priv) && INTEL_REVID(dev_priv) < 0x7;
292
293 if (pre) {
294 drm_err(&dev_priv->drm, "This is a pre-production stepping. "
295 "It may not be fully functional.\n");
296 add_taint(TAINT_MACHINE_CHECK, LOCKDEP_STILL_OK);
297 }
298}
299
/*
 * Reset all engines to scrub pre-existing GPU state, but only on platforms
 * where a GPU reset does not clobber the display.
 */
static void sanitize_gpu(struct drm_i915_private *i915)
{
	if (!INTEL_INFO(i915)->gpu_reset_clobbers_display)
		__intel_gt_reset(to_gt(i915), ALL_ENGINES);
}
305
306
307
308
309
310
311
312
313
314
315
/*
 * i915_driver_early_probe - setup state not requiring device access
 * @dev_priv: device private
 *
 * Initialize "software only" state: locks, workqueues and other state that
 * needs neither register access nor exposure to userspace. Partial failures
 * are unwound here in reverse order; on full success the matching teardown
 * is i915_driver_late_release().
 *
 * Returns 0 on success, negative error code otherwise.
 */
static int i915_driver_early_probe(struct drm_i915_private *dev_priv)
{
	int ret = 0;

	/* Fault-injection hook for probe error-path testing. */
	if (i915_inject_probe_failure(dev_priv))
		return -ENODEV;

	intel_device_info_subplatform_init(dev_priv);
	intel_step_init(dev_priv);

	intel_uncore_mmio_debug_init_early(&dev_priv->mmio_debug);

	spin_lock_init(&dev_priv->irq_lock);
	spin_lock_init(&dev_priv->gpu_error.lock);
	mutex_init(&dev_priv->backlight_lock);

	mutex_init(&dev_priv->sb_lock);
	cpu_latency_qos_add_request(&dev_priv->sb_qos, PM_QOS_DEFAULT_VALUE);

	mutex_init(&dev_priv->audio.mutex);
	mutex_init(&dev_priv->wm.wm_mutex);
	mutex_init(&dev_priv->pps_mutex);
	mutex_init(&dev_priv->hdcp_comp_mutex);

	i915_memcpy_init_early(dev_priv);
	intel_runtime_pm_init_early(&dev_priv->runtime_pm);

	ret = i915_workqueues_init(dev_priv);
	if (ret < 0)
		return ret;

	ret = vlv_suspend_init(dev_priv);
	if (ret < 0)
		goto err_workqueues;

	ret = intel_region_ttm_device_init(dev_priv);
	if (ret)
		goto err_ttm;

	intel_wopcm_init_early(&dev_priv->wopcm);

	intel_root_gt_init_early(dev_priv);

	i915_drm_clients_init(&dev_priv->clients, dev_priv);

	i915_gem_init_early(dev_priv);

	/* Detect and cache the PCH type before the init hooks below run. */
	intel_detect_pch(dev_priv);

	intel_pm_setup(dev_priv);
	ret = intel_power_domains_init(dev_priv);
	if (ret < 0)
		goto err_gem;
	intel_irq_init(dev_priv);
	intel_init_display_hooks(dev_priv);
	intel_init_clock_gating_hooks(dev_priv);

	intel_detect_preproduction_hw(dev_priv);

	return 0;

	/* Unwind in reverse order of initialization. */
err_gem:
	i915_gem_cleanup_early(dev_priv);
	intel_gt_driver_late_release_all(dev_priv);
	i915_drm_clients_fini(&dev_priv->clients);
	intel_region_ttm_device_fini(dev_priv);
err_ttm:
	vlv_suspend_cleanup(dev_priv);
err_workqueues:
	i915_workqueues_cleanup(dev_priv);
	return ret;
}
389
390
391
392
393
394
/*
 * i915_driver_late_release - cleanup the setup done in
 *			      i915_driver_early_probe()
 * @dev_priv: device private
 *
 * Tears down, in reverse order, everything i915_driver_early_probe() set up.
 */
static void i915_driver_late_release(struct drm_i915_private *dev_priv)
{
	intel_irq_fini(dev_priv);
	intel_power_domains_cleanup(dev_priv);
	i915_gem_cleanup_early(dev_priv);
	intel_gt_driver_late_release_all(dev_priv);
	i915_drm_clients_fini(&dev_priv->clients);
	intel_region_ttm_device_fini(dev_priv);
	vlv_suspend_cleanup(dev_priv);
	i915_workqueues_cleanup(dev_priv);

	cpu_latency_qos_remove_request(&dev_priv->sb_qos);
	mutex_destroy(&dev_priv->sb_lock);

	i915_params_free(&dev_priv->params);
}
411
412
413
414
415
416
417
418
419
420
/*
 * i915_driver_mmio_probe - setup device MMIO
 * @dev_priv: device private
 *
 * Set up the minimal state needed for MMIO access: bridge device lookup,
 * uncore MMIO mapping, MCHBAR, runtime device info and GT MMIO. Finishes
 * with a GPU reset (where that doesn't clobber the display) to scrub any
 * pre-existing GPU state.
 *
 * Returns 0 on success, negative error code otherwise; partial setup is
 * unwound on failure.
 */
static int i915_driver_mmio_probe(struct drm_i915_private *dev_priv)
{
	int ret;

	/* Fault-injection hook for probe error-path testing. */
	if (i915_inject_probe_failure(dev_priv))
		return -ENODEV;

	ret = i915_get_bridge_dev(dev_priv);
	if (ret < 0)
		return ret;

	ret = intel_uncore_init_mmio(&dev_priv->uncore);
	if (ret)
		return ret;

	/* Try to make sure MCHBAR is enabled before poking at it. */
	intel_setup_mchbar(dev_priv);
	intel_device_info_runtime_init(dev_priv);

	ret = intel_gt_init_mmio(to_gt(dev_priv));
	if (ret)
		goto err_uncore;

	/* As early as possible, scrub existing GPU state via reset. */
	sanitize_gpu(dev_priv);

	return 0;

err_uncore:
	intel_teardown_mchbar(dev_priv);
	intel_uncore_fini_mmio(&dev_priv->uncore);
	pci_dev_put(dev_priv->bridge_dev);

	return ret;
}
456
457
458
459
460
/*
 * i915_driver_mmio_release - cleanup the setup done in
 *			      i915_driver_mmio_probe()
 * @dev_priv: device private
 */
static void i915_driver_mmio_release(struct drm_i915_private *dev_priv)
{
	intel_teardown_mchbar(dev_priv);
	intel_uncore_fini_mmio(&dev_priv->uncore);
	pci_dev_put(dev_priv->bridge_dev);
}
467
468
469
470
471
472
473
474
475
476
477
478
/*
 * Configure the device's DMA addressing capabilities: unlimited segment
 * size, then the platform's DMA mask from the static device info, with the
 * coherent mask narrowed further on platforms with known limitations.
 *
 * Returns 0 on success or the error from the dma_set_*mask() calls.
 */
static int i915_set_dma_info(struct drm_i915_private *i915)
{
	unsigned int mask_size = INTEL_INFO(i915)->dma_mask_size;
	int ret;

	GEM_BUG_ON(!mask_size);

	/*
	 * We don't have a max segment size, so set it to the max, to avoid
	 * spurious complaints from the scatterlist debugging layer.
	 */
	dma_set_max_seg_size(i915->drm.dev, UINT_MAX);

	ret = dma_set_mask(i915->drm.dev, DMA_BIT_MASK(mask_size));
	if (ret)
		goto mask_err;

	/* Gen2 uses a narrower 30-bit coherent mask. */
	if (GRAPHICS_VER(i915) == 2)
		mask_size = 30;

	/*
	 * NOTE(review): 965G/GM clamp the coherent mask to 32 bits here --
	 * presumably a hardware erratum with >32b coherent allocations;
	 * confirm against the original changelog.
	 */
	if (IS_I965G(i915) || IS_I965GM(i915))
		mask_size = 32;

	ret = dma_set_coherent_mask(i915->drm.dev, DMA_BIT_MASK(mask_size));
	if (ret)
		goto mask_err;

	return 0;

mask_err:
	drm_err(&i915->drm, "Can't set DMA mask/consistent mask (%d)\n", ret);
	return ret;
}
522
523
524
525
526
527
528
529
/*
 * i915_driver_hw_probe - setup state requiring device access
 * @dev_priv: device private
 *
 * Set up state that requires accessing the device, but doesn't yet expose
 * the driver via kernel internal or userspace interfaces: DMA config, perf,
 * GGTT, memory regions, bus mastering, MSI, GVT, opregion, pcode, DRAM/BW
 * info, and D3cold handling for the root port.
 *
 * Returns 0 on success, negative error code otherwise; partial setup is
 * unwound in reverse order on failure.
 */
static int i915_driver_hw_probe(struct drm_i915_private *dev_priv)
{
	struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
	struct pci_dev *root_pdev;
	int ret;

	/* Fault-injection hook for probe error-path testing. */
	if (i915_inject_probe_failure(dev_priv))
		return -ENODEV;

	/* On ppGTT hardware, reject vGPU hosts without full ppGTT support. */
	if (HAS_PPGTT(dev_priv)) {
		if (intel_vgpu_active(dev_priv) &&
		    !intel_vgpu_has_full_ppgtt(dev_priv)) {
			i915_report_error(dev_priv,
					  "incompatible vGPU found, support for isolated ppGTT required\n");
			return -ENXIO;
		}
	}

	if (HAS_EXECLISTS(dev_priv)) {
		/*
		 * Likewise reject old vGPU hosts that cannot emulate the
		 * HWSP, which execlists submission relies on.
		 */
		if (intel_vgpu_active(dev_priv) &&
		    !intel_vgpu_has_hwsp_emulation(dev_priv)) {
			i915_report_error(dev_priv,
					  "old vGPU host found, support for HWSP emulation required\n");
			return -ENXIO;
		}
	}

	/* Detect (e)DRAM before the GGTT probe below. */
	intel_dram_edram_detect(dev_priv);

	ret = i915_set_dma_info(dev_priv);
	if (ret)
		return ret;

	i915_perf_init(dev_priv);

	ret = intel_gt_assign_ggtt(to_gt(dev_priv));
	if (ret)
		goto err_perf;

	ret = i915_ggtt_probe_hw(dev_priv);
	if (ret)
		goto err_perf;

	/* Kick out any firmware framebuffers using our apertures. */
	ret = drm_aperture_remove_conflicting_pci_framebuffers(pdev, dev_priv->drm.driver);
	if (ret)
		goto err_ggtt;

	ret = i915_ggtt_init_hw(dev_priv);
	if (ret)
		goto err_ggtt;

	ret = intel_memory_regions_hw_probe(dev_priv);
	if (ret)
		goto err_ggtt;

	ret = intel_gt_tiles_init(dev_priv);
	if (ret)
		goto err_mem_regions;

	ret = i915_ggtt_enable_hw(dev_priv);
	if (ret) {
		drm_err(&dev_priv->drm, "failed to enable GGTT\n");
		goto err_mem_regions;
	}

	pci_set_master(pdev);

	/*
	 * NOTE(review): MSI is only attempted on gen5+ here -- presumably
	 * MSI errata on earlier generations; confirm against the original
	 * changelog. MSI failure is non-fatal: the device falls back to
	 * legacy line interrupts.
	 */
	if (GRAPHICS_VER(dev_priv) >= 5) {
		if (pci_enable_msi(pdev) < 0)
			drm_dbg(&dev_priv->drm, "can't enable MSI");
	}

	ret = intel_gvt_init(dev_priv);
	if (ret)
		goto err_msi;

	intel_opregion_setup(dev_priv);

	ret = intel_pcode_init(dev_priv);
	if (ret)
		goto err_msi;

	/* Fill the DRAM structure with system memory info. */
	intel_dram_detect(dev_priv);

	intel_bw_init_hw(dev_priv);

	/*
	 * Keep the PCIe root port out of D3cold while the driver is bound;
	 * re-enabled in i915_driver_hw_remove().
	 */
	root_pdev = pcie_find_root_port(pdev);
	if (root_pdev)
		pci_d3cold_disable(root_pdev);

	return 0;

err_msi:
	if (pdev->msi_enabled)
		pci_disable_msi(pdev);
err_mem_regions:
	intel_memory_regions_driver_release(dev_priv);
err_ggtt:
	i915_ggtt_driver_release(dev_priv);
	i915_gem_drain_freed_objects(dev_priv);
	i915_ggtt_driver_late_release(dev_priv);
err_perf:
	i915_perf_fini(dev_priv);
	return ret;
}
669
670
671
672
673
/*
 * i915_driver_hw_remove - cleanup the setup done in i915_driver_hw_probe()
 * @dev_priv: device private
 *
 * Tears down perf and MSI, and re-allows D3cold on the PCIe root port.
 */
static void i915_driver_hw_remove(struct drm_i915_private *dev_priv)
{
	struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
	struct pci_dev *root_pdev;

	i915_perf_fini(dev_priv);

	if (pdev->msi_enabled)
		pci_disable_msi(pdev);

	/* Balance the pci_d3cold_disable() from i915_driver_hw_probe(). */
	root_pdev = pcie_find_root_port(pdev);
	if (root_pdev)
		pci_d3cold_enable(root_pdev);
}
688
689
690
691
692
693
694
695
/*
 * i915_driver_register - register the driver with the rest of the system
 * @dev_priv: device private
 *
 * Perform any steps necessary to make the driver available via kernel
 * internal or userspace interfaces: GEM, PMU, vGPU, the DRM core device
 * node, debugfs/sysfs, perf, GT, display, power domains, runtime PM, the
 * ACPI DSM handler and vga switcheroo.
 */
static void i915_driver_register(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;

	i915_gem_driver_register(dev_priv);
	i915_pmu_register(dev_priv);

	intel_vgpu_register(dev_priv);

	/* If the core registration fails, nothing below is attempted. */
	if (drm_dev_register(dev, 0)) {
		drm_err(&dev_priv->drm,
			"Failed to register driver for userspace access!\n");
		return;
	}

	i915_debugfs_register(dev_priv);
	i915_setup_sysfs(dev_priv);

	/* Depends on sysfs having been initialized */
	i915_perf_register(dev_priv);

	intel_gt_driver_register(to_gt(dev_priv));

	intel_display_driver_register(dev_priv);

	intel_power_domains_enable(dev_priv);
	intel_runtime_pm_enable(&dev_priv->runtime_pm);

	intel_register_dsm_handler();

	if (i915_switcheroo_register(dev_priv))
		drm_err(&dev_priv->drm, "Failed to register vga switcheroo!\n");
}
730
731
732
733
734
/*
 * i915_driver_unregister - cleanup the registration done in
 *			    i915_driver_register()
 * @dev_priv: device private
 *
 * Unwinds i915_driver_register() in reverse order.
 */
static void i915_driver_unregister(struct drm_i915_private *dev_priv)
{
	i915_switcheroo_unregister(dev_priv);

	intel_unregister_dsm_handler();

	intel_runtime_pm_disable(&dev_priv->runtime_pm);
	intel_power_domains_disable(dev_priv);

	intel_display_driver_unregister(dev_priv);

	intel_gt_driver_unregister(to_gt(dev_priv));

	i915_perf_unregister(dev_priv);
	i915_pmu_unregister(dev_priv);

	i915_teardown_sysfs(dev_priv);
	/* Unplug marks the device as gone for new userspace accessors. */
	drm_dev_unplug(&dev_priv->drm);

	i915_gem_driver_unregister(dev_priv);
}
756
/* Print whether the IOMMU (VT-d) is active for this device. */
void
i915_print_iommu_status(struct drm_i915_private *i915, struct drm_printer *p)
{
	const char *status = str_enabled_disabled(i915_vtd_active(i915));

	drm_printf(p, "iommu: %s\n", status);
}
763
/*
 * Emit device-identification info at driver-debug log level, and flag any
 * debug-heavy kernel configs that may affect performance.
 */
static void i915_welcome_messages(struct drm_i915_private *dev_priv)
{
	if (drm_debug_enabled(DRM_UT_DRIVER)) {
		struct drm_printer p = drm_debug_printer("i915 device info:");

		drm_printf(&p, "pciid=0x%04x rev=0x%02x platform=%s (subplatform=0x%x) gen=%i\n",
			   INTEL_DEVID(dev_priv),
			   INTEL_REVID(dev_priv),
			   intel_platform_name(INTEL_INFO(dev_priv)->platform),
			   intel_subplatform(RUNTIME_INFO(dev_priv),
					     INTEL_INFO(dev_priv)->platform),
			   GRAPHICS_VER(dev_priv));

		intel_device_info_print_static(INTEL_INFO(dev_priv), &p);
		intel_device_info_print_runtime(RUNTIME_INFO(dev_priv), &p);
		i915_print_iommu_status(dev_priv, &p);
		intel_gt_info_print(&to_gt(dev_priv)->info, &p);
	}

	if (IS_ENABLED(CONFIG_DRM_I915_DEBUG))
		drm_info(&dev_priv->drm, "DRM_I915_DEBUG enabled\n");
	if (IS_ENABLED(CONFIG_DRM_I915_DEBUG_GEM))
		drm_info(&dev_priv->drm, "DRM_I915_DEBUG_GEM enabled\n");
	if (IS_ENABLED(CONFIG_DRM_I915_DEBUG_RUNTIME_PM))
		drm_info(&dev_priv->drm,
			 "DRM_I915_DEBUG_RUNTIME_PM enabled\n");
}
791
/*
 * Allocate the devm-managed drm_i915_private for @pdev, snapshot the module
 * parameters, and seed the device info from the matched PCI ID entry.
 *
 * Returns the new device private or an ERR_PTR on allocation failure.
 */
static struct drm_i915_private *
i915_driver_create(struct pci_dev *pdev, const struct pci_device_id *ent)
{
	const struct intel_device_info *match_info =
		(struct intel_device_info *)ent->driver_data;
	struct intel_device_info *device_info;
	struct drm_i915_private *i915;

	i915 = devm_drm_dev_alloc(&pdev->dev, &i915_drm_driver,
				  struct drm_i915_private, drm);
	if (IS_ERR(i915))
		return i915;

	pci_set_drvdata(pdev, i915);

	/* Device parameters start as a copy of module parameters. */
	i915_params_copy(&i915->params, &i915_modparams);

	/* Setup the write-once "constant" device info */
	device_info = mkwrite_device_info(i915);
	memcpy(device_info, match_info, sizeof(*device_info));
	RUNTIME_INFO(i915)->device_id = pdev->device;

	return i915;
}
817
818
819
820
821
822
823
824
825
826
827
828
/**
 * i915_driver_probe - setup chip and create an initial config
 * @pdev: PCI device
 * @ent: matching PCI ID entry
 *
 * The driver probe routine: creates the device private, then runs all the
 * initialization stages in order -- early SW state, GT probe, MMIO, HW,
 * modeset (noirq/nogem), irq, GEM, full modeset -- registering with
 * userspace only once everything is up. Failures unwind the completed
 * stages in reverse order.
 *
 * Returns 0 on success, negative error code on failure.
 */
int i915_driver_probe(struct pci_dev *pdev, const struct pci_device_id *ent)
{
	const struct intel_device_info *match_info =
		(struct intel_device_info *)ent->driver_data;
	struct drm_i915_private *i915;
	int ret;

	i915 = i915_driver_create(pdev, ent);
	if (IS_ERR(i915))
		return PTR_ERR(i915);

	/* Disable nuclear pageflip by default on pre-gen5 */
	if (!i915->params.nuclear_pageflip && match_info->graphics.ver < 5)
		i915->drm.driver_features &= ~DRIVER_ATOMIC;

	ret = pci_enable_device(pdev);
	if (ret)
		goto out_fini;

	ret = i915_driver_early_probe(i915);
	if (ret < 0)
		goto out_pci_disable;

	/* Wakeref asserts are suppressed for the whole probe sequence. */
	disable_rpm_wakeref_asserts(&i915->runtime_pm);

	intel_vgpu_detect(i915);

	ret = intel_gt_probe_all(i915);
	if (ret < 0)
		goto out_runtime_pm_put;

	ret = i915_driver_mmio_probe(i915);
	if (ret < 0)
		goto out_tiles_cleanup;

	ret = i915_driver_hw_probe(i915);
	if (ret < 0)
		goto out_cleanup_mmio;

	ret = intel_modeset_init_noirq(i915);
	if (ret < 0)
		goto out_cleanup_hw;

	ret = intel_irq_install(i915);
	if (ret)
		goto out_cleanup_modeset;

	ret = intel_modeset_init_nogem(i915);
	if (ret)
		goto out_cleanup_irq;

	ret = i915_gem_init(i915);
	if (ret)
		goto out_cleanup_modeset2;

	ret = intel_modeset_init(i915);
	if (ret)
		goto out_cleanup_gem;

	i915_driver_register(i915);

	enable_rpm_wakeref_asserts(&i915->runtime_pm);

	i915_welcome_messages(i915);

	/* Everything is set up; allow i915_driver_release() to tear down. */
	i915->do_release = true;

	return 0;

out_cleanup_gem:
	i915_gem_suspend(i915);
	i915_gem_driver_remove(i915);
	i915_gem_driver_release(i915);
out_cleanup_modeset2:
	/* FIXME clean up the error path */
	intel_modeset_driver_remove(i915);
	intel_irq_uninstall(i915);
	intel_modeset_driver_remove_noirq(i915);
	goto out_cleanup_modeset;
out_cleanup_irq:
	intel_irq_uninstall(i915);
out_cleanup_modeset:
	intel_modeset_driver_remove_nogem(i915);
out_cleanup_hw:
	i915_driver_hw_remove(i915);
	intel_memory_regions_driver_release(i915);
	i915_ggtt_driver_release(i915);
	i915_gem_drain_freed_objects(i915);
	i915_ggtt_driver_late_release(i915);
out_cleanup_mmio:
	i915_driver_mmio_release(i915);
out_tiles_cleanup:
	intel_gt_release_all(i915);
out_runtime_pm_put:
	enable_rpm_wakeref_asserts(&i915->runtime_pm);
	i915_driver_late_release(i915);
out_pci_disable:
	pci_disable_device(pdev);
out_fini:
	i915_probe_error(i915, "Device initialization failed (%d)\n", ret);
	return ret;
}
931
/*
 * Tear down a fully probed device, unwinding the probe stages that exposed
 * the driver to the rest of the system. Final cleanup of allocations
 * happens later in i915_driver_release().
 */
void i915_driver_remove(struct drm_i915_private *i915)
{
	disable_rpm_wakeref_asserts(&i915->runtime_pm);

	i915_driver_unregister(i915);

	/* Wait for in-flight RCU read-side sections to complete. */
	synchronize_rcu();

	i915_gem_suspend(i915);

	intel_gvt_driver_remove(i915);

	intel_modeset_driver_remove(i915);

	intel_irq_uninstall(i915);

	intel_modeset_driver_remove_noirq(i915);

	i915_reset_error_state(i915);
	i915_gem_driver_remove(i915);

	intel_modeset_driver_remove_nogem(i915);

	i915_driver_hw_remove(i915);

	enable_rpm_wakeref_asserts(&i915->runtime_pm);
}
960
/*
 * Final release callback from the DRM core: free everything remaining
 * after i915_driver_remove(). do_release is only set once probe fully
 * succeeded, so a failed probe skips this teardown.
 */
static void i915_driver_release(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;

	if (!dev_priv->do_release)
		return;

	disable_rpm_wakeref_asserts(rpm);

	i915_gem_driver_release(dev_priv);

	intel_memory_regions_driver_release(dev_priv);
	i915_ggtt_driver_release(dev_priv);
	i915_gem_drain_freed_objects(dev_priv);
	i915_ggtt_driver_late_release(dev_priv);

	i915_driver_mmio_release(dev_priv);

	enable_rpm_wakeref_asserts(rpm);
	intel_runtime_pm_driver_release(rpm);

	i915_driver_late_release(dev_priv);
}
985
/*
 * DRM open callback: per-file GEM state is the only thing to set up.
 * Returns 0 on success or the error from i915_gem_open().
 */
static int i915_driver_open(struct drm_device *dev, struct drm_file *file)
{
	return i915_gem_open(to_i915(dev), file);
}
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
/*
 * i915_driver_lastclose - DRM lastclose callback
 * @dev: DRM device
 *
 * Runs when the last userspace handle to the device is closed: restores the
 * fbdev mode and, if there is a display, processes any delayed vga
 * switcheroo switch.
 */
static void i915_driver_lastclose(struct drm_device *dev)
{
	struct drm_i915_private *i915 = to_i915(dev);

	intel_fbdev_restore_mode(dev);

	if (HAS_DISPLAY(i915))
		vga_switcheroo_process_delayed_switch();
}
1019
/*
 * DRM postclose callback: release the per-file GEM contexts and client
 * bookkeeping. The file private is freed via RCU since lookups may still
 * be in flight.
 */
static void i915_driver_postclose(struct drm_device *dev, struct drm_file *file)
{
	struct drm_i915_file_private *file_priv = file->driver_priv;

	i915_gem_context_close(file);
	i915_drm_client_put(file_priv->client);

	kfree_rcu(file_priv, rcu);

	/* Catch up with all the deferred frees from "this" client */
	i915_gem_flush_free_objects(to_i915(dev));
}
1032
/*
 * Call each display encoder's ->suspend() hook (if it has one), under the
 * modeset locks. No-op on display-less devices.
 */
static void intel_suspend_encoders(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(dev_priv))
		return;

	drm_modeset_lock_all(dev);
	for_each_intel_encoder(dev, encoder)
		if (encoder->suspend)
			encoder->suspend(encoder);
	drm_modeset_unlock_all(dev);
}
1047
/*
 * Call each display encoder's ->shutdown() hook (if it has one), under the
 * modeset locks. No-op on display-less devices.
 */
static void intel_shutdown_encoders(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_encoder *encoder;

	if (!HAS_DISPLAY(dev_priv))
		return;

	drm_modeset_lock_all(dev);
	for_each_intel_encoder(dev, encoder)
		if (encoder->shutdown)
			encoder->shutdown(encoder);
	drm_modeset_unlock_all(dev);
}
1062
/*
 * Quiesce the device for system shutdown/reboot: disable runtime PM and
 * power domains, suspend GEM, shut down the display and encoders, and
 * release the runtime PM state.
 */
void i915_driver_shutdown(struct drm_i915_private *i915)
{
	disable_rpm_wakeref_asserts(&i915->runtime_pm);
	intel_runtime_pm_disable(&i915->runtime_pm);
	intel_power_domains_disable(i915);

	i915_gem_suspend(i915);

	if (HAS_DISPLAY(i915)) {
		drm_kms_helper_poll_disable(&i915->drm);

		drm_atomic_helper_shutdown(&i915->drm);
	}

	intel_dp_mst_suspend(i915);

	intel_runtime_pm_disable_interrupts(i915);
	intel_hpd_cancel_work(i915);

	intel_suspend_encoders(i915);
	intel_shutdown_encoders(i915);

	intel_dmc_ucode_suspend(i915);

	/*
	 * NOTE(review): power domains are removed here (rather than merely
	 * disabled) before releasing the runtime PM state -- presumably so
	 * no domain stays powered across shutdown; confirm against the
	 * original changelog.
	 */
	intel_power_domains_driver_remove(i915);
	enable_rpm_wakeref_asserts(&i915->runtime_pm);

	intel_runtime_pm_driver_release(&i915->runtime_pm);
}
1103
1104static bool suspend_to_idle(struct drm_i915_private *dev_priv)
1105{
1106#if IS_ENABLED(CONFIG_ACPI_SLEEP)
1107 if (acpi_target_system_state() < ACPI_STATE_S3)
1108 return true;
1109#endif
1110 return false;
1111}
1112
/*
 * PM prepare step: back up / suspend GEM state ahead of the suspend
 * sequence proper. Returns 0 on success or a negative error code.
 */
static int i915_drm_prepare(struct drm_device *dev)
{
	struct drm_i915_private *i915 = to_i915(dev);

	/*
	 * NB intel_display_suspend() may issue new requests after we've
	 * ostensibly marked the GPU as ready-to-sleep here. We need to
	 * track activity explicitly so that none of those requests are
	 * lost across suspend.
	 */
	return i915_gem_backup_suspend(i915);
}
1125
/*
 * First phase of system suspend: disable power domains and polling, save
 * PCI and display state, quiesce display/MST/interrupts/encoders, suspend
 * DPT and GGTT mappings, and notify the opregion of the target state.
 * Always returns 0.
 */
static int i915_drm_suspend(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
	pci_power_t opregion_target_state;

	disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);

	/* We do a lot of poking in a lot of registers, make sure they work
	 * properly. */
	intel_power_domains_disable(dev_priv);
	if (HAS_DISPLAY(dev_priv))
		drm_kms_helper_poll_disable(dev);

	pci_save_state(pdev);

	intel_display_suspend(dev);

	intel_dp_mst_suspend(dev_priv);

	intel_runtime_pm_disable_interrupts(dev_priv);
	intel_hpd_cancel_work(dev_priv);

	intel_suspend_encoders(dev_priv);

	intel_suspend_hw(dev_priv);

	/* Must suspend DPT before the GGTT it is mapped in. */
	intel_dpt_suspend(dev_priv);
	i915_ggtt_suspend(to_gt(dev_priv)->ggtt);

	i915_save_display(dev_priv);

	/*
	 * NOTE(review): for suspend-to-idle the opregion is told D1 rather
	 * than D3cold -- presumably a firmware-compatibility quirk; confirm
	 * against the original changelog.
	 */
	opregion_target_state = suspend_to_idle(dev_priv) ? PCI_D1 : PCI_D3cold;
	intel_opregion_suspend(dev_priv, opregion_target_state);

	intel_fbdev_set_suspend(dev, FBINFO_STATE_SUSPENDED, true);

	dev_priv->suspend_count++;

	intel_dmc_ucode_suspend(dev_priv);

	enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);

	return 0;
}
1172
1173static enum i915_drm_suspend_mode
1174get_suspend_mode(struct drm_i915_private *dev_priv, bool hibernate)
1175{
1176 if (hibernate)
1177 return I915_DRM_SUSPEND_HIBERNATE;
1178
1179 if (suspend_to_idle(dev_priv))
1180 return I915_DRM_SUSPEND_IDLE;
1181
1182 return I915_DRM_SUSPEND_MEM;
1183}
1184
/*
 * Late phase of system suspend: suspend GEM and uncore, power down the
 * power domains for the chosen suspend mode, and put the PCI device into
 * D3hot (except when hibernating on pre-gen6, which keeps D0). Returns 0
 * on success; on vlv_suspend_complete() failure the power domains are
 * resumed and the error is returned.
 */
static int i915_drm_suspend_late(struct drm_device *dev, bool hibernation)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
	int ret;

	disable_rpm_wakeref_asserts(rpm);

	i915_gem_suspend_late(dev_priv);

	intel_uncore_suspend(&dev_priv->uncore);

	intel_power_domains_suspend(dev_priv,
				    get_suspend_mode(dev_priv, hibernation));

	intel_display_power_suspend_late(dev_priv);

	ret = vlv_suspend_complete(dev_priv);
	if (ret) {
		drm_err(&dev_priv->drm, "Suspend complete failed: %d\n", ret);
		intel_power_domains_resume(dev_priv);

		goto out;
	}

	pci_disable_device(pdev);

	/*
	 * NOTE(review): pre-gen6 hardware is deliberately left in D0 when
	 * hibernating -- presumably entering D3hot breaks resume from
	 * hibernation on those platforms; confirm against the original
	 * changelog.
	 */
	if (!(hibernation && GRAPHICS_VER(dev_priv) < 6))
		pci_set_power_state(pdev, PCI_D3hot);

out:
	enable_rpm_wakeref_asserts(rpm);
	if (!dev_priv->uncore.user_forcewake_count)
		intel_runtime_pm_driver_release(rpm);

	return ret;
}
1234
/*
 * vga switcheroo entry point: run the full suspend sequence (both phases)
 * for a suspend or freeze event. A device already switched off is a no-op.
 * Returns 0 on success, -EINVAL for unexpected PM events, or the error
 * from the suspend phases.
 */
int i915_driver_suspend_switcheroo(struct drm_i915_private *i915,
				   pm_message_t state)
{
	int error;

	if (drm_WARN_ON_ONCE(&i915->drm, state.event != PM_EVENT_SUSPEND &&
			     state.event != PM_EVENT_FREEZE))
		return -EINVAL;

	if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
		return 0;

	error = i915_drm_suspend(&i915->drm);
	if (error)
		return error;

	return i915_drm_suspend_late(&i915->drm, false);
}
1253
1254static int i915_drm_resume(struct drm_device *dev)
1255{
1256 struct drm_i915_private *dev_priv = to_i915(dev);
1257 int ret;
1258
1259 disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
1260
1261 ret = intel_pcode_init(dev_priv);
1262 if (ret)
1263 return ret;
1264
1265 sanitize_gpu(dev_priv);
1266
1267 ret = i915_ggtt_enable_hw(dev_priv);
1268 if (ret)
1269 drm_err(&dev_priv->drm, "failed to re-enable GGTT\n");
1270
1271 i915_ggtt_resume(to_gt(dev_priv)->ggtt);
1272
1273 intel_dpt_resume(dev_priv);
1274
1275 intel_dmc_ucode_resume(dev_priv);
1276
1277 i915_restore_display(dev_priv);
1278 intel_pps_unlock_regs_wa(dev_priv);
1279
1280 intel_init_pch_refclk(dev_priv);
1281
1282
1283
1284
1285
1286
1287
1288
1289
1290
1291
1292 intel_runtime_pm_enable_interrupts(dev_priv);
1293
1294 if (HAS_DISPLAY(dev_priv))
1295 drm_mode_config_reset(dev);
1296
1297 i915_gem_resume(dev_priv);
1298
1299 intel_modeset_init_hw(dev_priv);
1300 intel_init_clock_gating(dev_priv);
1301 intel_hpd_init(dev_priv);
1302
1303
1304 intel_dp_mst_resume(dev_priv);
1305 intel_display_resume(dev);
1306
1307 intel_hpd_poll_disable(dev_priv);
1308 if (HAS_DISPLAY(dev_priv))
1309 drm_kms_helper_poll_enable(dev);
1310
1311 intel_opregion_resume(dev_priv);
1312
1313 intel_fbdev_set_suspend(dev, FBINFO_STATE_RUNNING, false);
1314
1315 intel_power_domains_enable(dev_priv);
1316
1317 intel_gvt_resume(dev_priv);
1318
1319 enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);
1320
1321 return 0;
1322}
1323
/*
 * First phase of system resume: bring the PCI device back to D0, re-enable
 * it and bus mastering, then resume uncore, clear GT faults and restore
 * power domains. Returns 0 on success or a negative error code; a
 * vlv_resume_prepare() failure is logged but resume continues.
 */
static int i915_drm_resume_early(struct drm_device *dev)
{
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct pci_dev *pdev = to_pci_dev(dev_priv->drm.dev);
	int ret;

	/*
	 * NOTE(review): D0 is entered explicitly here, before
	 * pci_enable_device(), rather than relying on the PCI core --
	 * presumably to control resume ordering; confirm against the
	 * original changelog.
	 */
	ret = pci_set_power_state(pdev, PCI_D0);
	if (ret) {
		drm_err(&dev_priv->drm,
			"failed to set PCI D0 power state (%d)\n", ret);
		return ret;
	}

	/*
	 * Re-enable the device; this can fail and the error is deliberately
	 * not propagated beyond -EIO (nothing more can be done here).
	 */
	if (pci_enable_device(pdev))
		return -EIO;

	pci_set_master(pdev);

	disable_rpm_wakeref_asserts(&dev_priv->runtime_pm);

	ret = vlv_resume_prepare(dev_priv, false);
	if (ret)
		drm_err(&dev_priv->drm,
			"Resume prepare failed: %d, continuing anyway\n", ret);

	intel_uncore_resume_early(&dev_priv->uncore);

	intel_gt_check_and_clear_faults(to_gt(dev_priv));

	intel_display_power_resume_early(dev_priv);

	intel_power_domains_resume(dev_priv);

	enable_rpm_wakeref_asserts(&dev_priv->runtime_pm);

	return ret;
}
1394
1395int i915_driver_resume_switcheroo(struct drm_i915_private *i915)
1396{
1397 int ret;
1398
1399 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1400 return 0;
1401
1402 ret = i915_drm_resume_early(&i915->drm);
1403 if (ret)
1404 return ret;
1405
1406 return i915_drm_resume(&i915->drm);
1407}
1408
1409static int i915_pm_prepare(struct device *kdev)
1410{
1411 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1412
1413 if (!i915) {
1414 dev_err(kdev, "DRM not initialized, aborting suspend.\n");
1415 return -ENODEV;
1416 }
1417
1418 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1419 return 0;
1420
1421 return i915_drm_prepare(&i915->drm);
1422}
1423
1424static int i915_pm_suspend(struct device *kdev)
1425{
1426 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1427
1428 if (!i915) {
1429 dev_err(kdev, "DRM not initialized, aborting suspend.\n");
1430 return -ENODEV;
1431 }
1432
1433 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1434 return 0;
1435
1436 return i915_drm_suspend(&i915->drm);
1437}
1438
/*
 * dev_pm_ops.suspend_late: late-phase system suspend, taking the device
 * down to its low power state.  The 'false' argument selects the
 * non-hibernation path of i915_drm_suspend_late().
 *
 * NOTE(review): unlike i915_pm_suspend(), there is no NULL check on
 * i915 here — presumably .prepare/.suspend have already run and
 * validated it; confirm the PM core guarantees that ordering.
 */
static int i915_pm_suspend_late(struct device *kdev)
{
	struct drm_i915_private *i915 = kdev_to_i915(kdev);

	if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
		return 0;

	return i915_drm_suspend_late(&i915->drm, false);
}
1457
1458static int i915_pm_poweroff_late(struct device *kdev)
1459{
1460 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1461
1462 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1463 return 0;
1464
1465 return i915_drm_suspend_late(&i915->drm, true);
1466}
1467
1468static int i915_pm_resume_early(struct device *kdev)
1469{
1470 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1471
1472 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1473 return 0;
1474
1475 return i915_drm_resume_early(&i915->drm);
1476}
1477
1478static int i915_pm_resume(struct device *kdev)
1479{
1480 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1481
1482 if (i915->drm.switch_power_state == DRM_SWITCH_POWER_OFF)
1483 return 0;
1484
1485 return i915_drm_resume(&i915->drm);
1486}
1487
1488
1489static int i915_pm_freeze(struct device *kdev)
1490{
1491 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1492 int ret;
1493
1494 if (i915->drm.switch_power_state != DRM_SWITCH_POWER_OFF) {
1495 ret = i915_drm_suspend(&i915->drm);
1496 if (ret)
1497 return ret;
1498 }
1499
1500 ret = i915_gem_freeze(i915);
1501 if (ret)
1502 return ret;
1503
1504 return 0;
1505}
1506
1507static int i915_pm_freeze_late(struct device *kdev)
1508{
1509 struct drm_i915_private *i915 = kdev_to_i915(kdev);
1510 int ret;
1511
1512 if (i915->drm.switch_power_state != DRM_SWITCH_POWER_OFF) {
1513 ret = i915_drm_suspend_late(&i915->drm, true);
1514 if (ret)
1515 return ret;
1516 }
1517
1518 ret = i915_gem_freeze_late(i915);
1519 if (ret)
1520 return ret;
1521
1522 return 0;
1523}
1524
1525
/* dev_pm_ops.thaw_early: hibernation thaw reuses the resume_early path. */
static int i915_pm_thaw_early(struct device *kdev)
{
	return i915_pm_resume_early(kdev);
}
1530
/* dev_pm_ops.thaw: hibernation thaw reuses the full resume path. */
static int i915_pm_thaw(struct device *kdev)
{
	return i915_pm_resume(kdev);
}
1535
1536
/* dev_pm_ops.restore_early: restore from hibernation = early resume. */
static int i915_pm_restore_early(struct device *kdev)
{
	return i915_pm_resume_early(kdev);
}
1541
/* dev_pm_ops.restore: restore from hibernation = full resume. */
static int i915_pm_restore(struct device *kdev)
{
	return i915_pm_resume(kdev);
}
1546
/*
 * Runtime-PM suspend callback: quiesce GEM, the GT, interrupts, MMIO
 * access and display power, then mark the device suspended and notify
 * the ACPI opregion of the new device power state.
 *
 * The sequence of calls below is order-dependent (e.g. interrupts are
 * disabled before uncore access is suspended); do not reorder.
 */
static int intel_runtime_suspend(struct device *kdev)
{
	struct drm_i915_private *dev_priv = kdev_to_i915(kdev);
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
	int ret;

	/* Runtime PM must never be entered on platforms lacking support. */
	if (drm_WARN_ON_ONCE(&dev_priv->drm, !HAS_RUNTIME_PM(dev_priv)))
		return -ENODEV;

	drm_dbg_kms(&dev_priv->drm, "Suspending device\n");

	/* Hardware access during the transition is expected and legal. */
	disable_rpm_wakeref_asserts(rpm);

	i915_gem_runtime_suspend(dev_priv);

	intel_gt_runtime_suspend(to_gt(dev_priv));

	intel_runtime_pm_disable_interrupts(dev_priv);

	intel_uncore_suspend(&dev_priv->uncore);

	intel_display_power_suspend(dev_priv);

	/* VLV/CHV need extra save/gating work; no-op elsewhere. */
	ret = vlv_suspend_complete(dev_priv);
	if (ret) {
		drm_err(&dev_priv->drm,
			"Runtime suspend failed, disabling it (%d)\n", ret);
		/* Unwind in reverse order so the device keeps working. */
		intel_uncore_runtime_resume(&dev_priv->uncore);

		intel_runtime_pm_enable_interrupts(dev_priv);

		intel_gt_runtime_resume(to_gt(dev_priv));

		enable_rpm_wakeref_asserts(rpm);

		return ret;
	}

	enable_rpm_wakeref_asserts(rpm);
	intel_runtime_pm_driver_release(rpm);

	/* Catch stray register accesses that slipped in before suspend. */
	if (intel_uncore_arm_unclaimed_mmio_detection(&dev_priv->uncore))
		drm_err(&dev_priv->drm,
			"Unclaimed access detected prior to suspending\n");

	rpm->suspended = true;

	if (IS_BROADWELL(dev_priv)) {
		/* Broadwell reports D3hot to the ACPI opregion. */
		intel_opregion_notify_adapter(dev_priv, PCI_D3hot);
	} else {
		/*
		 * NOTE(review): other platforms deliberately report PCI_D1
		 * rather than a D3 state here — presumably a firmware/
		 * opregion compatibility workaround; confirm against the
		 * ACPI opregion spec before changing.
		 */
		intel_opregion_notify_adapter(dev_priv, PCI_D1);
	}

	/* All forcewake references must have been dropped by now. */
	assert_forcewakes_inactive(&dev_priv->uncore);

	/* With interrupts off, fall back to polling for hotplug events. */
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv))
		intel_hpd_poll_enable(dev_priv);

	drm_dbg_kms(&dev_priv->drm, "Device suspended\n");
	return 0;
}
1629
/*
 * Runtime-PM resume callback: mirror image of intel_runtime_suspend().
 * Notifies the opregion of D0, restores display power, uncore access,
 * interrupts and the GT, then re-enables hotplug interrupt handling.
 *
 * Note that on a vlv_resume_prepare() failure the function still
 * completes the remaining resume steps and only reports the error at
 * the end, leaving the device as functional as possible.
 */
static int intel_runtime_resume(struct device *kdev)
{
	struct drm_i915_private *dev_priv = kdev_to_i915(kdev);
	struct intel_runtime_pm *rpm = &dev_priv->runtime_pm;
	int ret;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !HAS_RUNTIME_PM(dev_priv)))
		return -ENODEV;

	drm_dbg_kms(&dev_priv->drm, "Resuming device\n");

	/* No wakerefs may be held while we are runtime suspended. */
	drm_WARN_ON_ONCE(&dev_priv->drm, atomic_read(&rpm->wakeref_count));
	disable_rpm_wakeref_asserts(rpm);

	intel_opregion_notify_adapter(dev_priv, PCI_D0);
	rpm->suspended = false;
	/* Detect register accesses that happened while we were down. */
	if (intel_uncore_unclaimed_mmio(&dev_priv->uncore))
		drm_dbg(&dev_priv->drm,
			"Unclaimed access during suspend, bios?\n");

	intel_display_power_resume(dev_priv);

	ret = vlv_resume_prepare(dev_priv, true);

	intel_uncore_runtime_resume(&dev_priv->uncore);

	intel_runtime_pm_enable_interrupts(dev_priv);

	intel_gt_runtime_resume(to_gt(dev_priv));

	/*
	 * Re-arm hotplug interrupts and stop polling; VLV/CHV are
	 * excluded here, matching the poll-enable path on suspend.
	 */
	if (!IS_VALLEYVIEW(dev_priv) && !IS_CHERRYVIEW(dev_priv)) {
		intel_hpd_init(dev_priv);
		intel_hpd_poll_disable(dev_priv);
	}

	intel_enable_ipc(dev_priv);

	enable_rpm_wakeref_asserts(rpm);

	if (ret)
		drm_err(&dev_priv->drm,
			"Runtime resume failed, disabling it (%d)\n", ret);
	else
		drm_dbg_kms(&dev_priv->drm, "Device resumed\n");

	return ret;
}
1686
/*
 * Power-management callbacks for the i915 PCI device.  The first group
 * implements system suspend/resume (S3), the second hibernation (S4:
 * freeze/thaw for image creation, poweroff/restore around the image),
 * and the last pair implements device runtime PM.
 */
const struct dev_pm_ops i915_pm_ops = {
	/* System suspend (S3) sequence. */
	.prepare = i915_pm_prepare,
	.suspend = i915_pm_suspend,
	.suspend_late = i915_pm_suspend_late,
	.resume_early = i915_pm_resume_early,
	.resume = i915_pm_resume,

	/*
	 * Hibernation (S4): freeze/freeze_late run before the image is
	 * created, thaw/thaw_early after a failed or completed image
	 * write, poweroff/poweroff_late before powering down, and
	 * restore/restore_early after the image has been loaded.
	 */
	.freeze = i915_pm_freeze,
	.freeze_late = i915_pm_freeze_late,
	.thaw_early = i915_pm_thaw_early,
	.thaw = i915_pm_thaw,
	.poweroff = i915_pm_suspend,
	.poweroff_late = i915_pm_poweroff_late,
	.restore_early = i915_pm_restore_early,
	.restore = i915_pm_restore,

	/* Device runtime PM. */
	.runtime_suspend = intel_runtime_suspend,
	.runtime_resume = intel_runtime_resume,
};
1726
/*
 * File operations for i915 DRM device nodes.  Mostly generic DRM
 * helpers; i915 overrides mmap (GEM object mapping), 32-bit compat
 * ioctl translation and, with procfs, per-client fdinfo statistics.
 */
static const struct file_operations i915_driver_fops = {
	.owner = THIS_MODULE,
	.open = drm_open,
	.release = drm_release_noglobal,
	.unlocked_ioctl = drm_ioctl,
	.mmap = i915_gem_mmap,
	.poll = drm_poll,
	.read = drm_read,
	.compat_ioctl = i915_ioc32_compat_ioctl,
	.llseek = noop_llseek,
#ifdef CONFIG_PROC_FS
	.show_fdinfo = i915_drm_client_fdinfo,
#endif
};
1741
/*
 * Stub handler for the removed GEM pin/unpin ioctls: unconditionally
 * fails with -ENODEV so legacy userspace gets a clear error.
 */
static int
i915_gem_reject_pin_ioctl(struct drm_device *dev, void *data,
			  struct drm_file *file)
{
	return -ENODEV;
}
1748
/*
 * i915 ioctl dispatch table.  Obsolete DRI1/UMS entries are wired to
 * drm_noop or drm_invalid_op so old userspace fails gracefully; the
 * live GEM/KMS/perf/query entries are mostly DRM_RENDER_ALLOW so they
 * work on render nodes as well as the primary node.
 */
static const struct drm_ioctl_desc i915_ioctls[] = {
	/* Legacy DRI1 interfaces — kept only as no-ops. */
	DRM_IOCTL_DEF_DRV(I915_INIT, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_FLUSH, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_FLIP, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_BATCHBUFFER, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_IRQ_EMIT, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_IRQ_WAIT, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_GETPARAM, i915_getparam_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_SETPARAM, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_ALLOC, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_FREE, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_INIT_HEAP, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_CMDBUFFER, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_DESTROY_HEAP, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_SET_VBLANK_PIPE, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GET_VBLANK_PIPE, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_VBLANK_SWAP, drm_noop, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_HWS_ADDR, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GEM_INIT, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	/* GEM execution, object and context management. */
	DRM_IOCTL_DEF_DRV(I915_GEM_EXECBUFFER, drm_invalid_op, DRM_AUTH),
	DRM_IOCTL_DEF_DRV(I915_GEM_EXECBUFFER2_WR, i915_gem_execbuffer2_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_PIN, i915_gem_reject_pin_ioctl, DRM_AUTH|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GEM_UNPIN, i915_gem_reject_pin_ioctl, DRM_AUTH|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GEM_BUSY, i915_gem_busy_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_SET_CACHING, i915_gem_set_caching_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_GET_CACHING, i915_gem_get_caching_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_THROTTLE, i915_gem_throttle_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_ENTERVT, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GEM_LEAVEVT, drm_noop, DRM_AUTH|DRM_MASTER|DRM_ROOT_ONLY),
	DRM_IOCTL_DEF_DRV(I915_GEM_CREATE, i915_gem_create_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_CREATE_EXT, i915_gem_create_ext_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_PREAD, i915_gem_pread_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_PWRITE, i915_gem_pwrite_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_MMAP, i915_gem_mmap_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_MMAP_OFFSET, i915_gem_mmap_offset_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_SET_DOMAIN, i915_gem_set_domain_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_SW_FINISH, i915_gem_sw_finish_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_SET_TILING, i915_gem_set_tiling_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_GET_TILING, i915_gem_get_tiling_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_GET_APERTURE, i915_gem_get_aperture_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GET_PIPE_FROM_CRTC_ID, intel_get_pipe_from_crtc_id_ioctl, 0),
	DRM_IOCTL_DEF_DRV(I915_GEM_MADVISE, i915_gem_madvise_ioctl, DRM_RENDER_ALLOW),
	/* Display-side ioctls require master on the primary node. */
	DRM_IOCTL_DEF_DRV(I915_OVERLAY_PUT_IMAGE, intel_overlay_put_image_ioctl, DRM_MASTER),
	DRM_IOCTL_DEF_DRV(I915_OVERLAY_ATTRS, intel_overlay_attrs_ioctl, DRM_MASTER),
	DRM_IOCTL_DEF_DRV(I915_SET_SPRITE_COLORKEY, intel_sprite_set_colorkey_ioctl, DRM_MASTER),
	DRM_IOCTL_DEF_DRV(I915_GET_SPRITE_COLORKEY, drm_noop, DRM_MASTER),
	DRM_IOCTL_DEF_DRV(I915_GEM_WAIT, i915_gem_wait_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_CONTEXT_CREATE_EXT, i915_gem_context_create_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_CONTEXT_DESTROY, i915_gem_context_destroy_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_REG_READ, i915_reg_read_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GET_RESET_STATS, i915_gem_context_reset_stats_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_USERPTR, i915_gem_userptr_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_CONTEXT_GETPARAM, i915_gem_context_getparam_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_CONTEXT_SETPARAM, i915_gem_context_setparam_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_PERF_OPEN, i915_perf_open_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_PERF_ADD_CONFIG, i915_perf_add_config_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_PERF_REMOVE_CONFIG, i915_perf_remove_config_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_QUERY, i915_query_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_VM_CREATE, i915_gem_vm_create_ioctl, DRM_RENDER_ALLOW),
	DRM_IOCTL_DEF_DRV(I915_GEM_VM_DESTROY, i915_gem_vm_destroy_ioctl, DRM_RENDER_ALLOW),
};
1810
1811
1812
1813
1814
1815
1816
1817
1818
1819
1820
1821
/*
 * Legacy DRM interface version reported to userspace via the
 * drm_driver major/minor/patchlevel fields below.
 */
#define DRIVER_MAJOR		1
#define DRIVER_MINOR		6
#define DRIVER_PATCHLEVEL	0
1825
/*
 * DRM driver vtable for i915: a GEM-based, atomic-modesetting render
 * driver with syncobj (including timeline) support, PRIME import and
 * export, dumb buffers and the ioctl table defined above.
 */
static const struct drm_driver i915_drm_driver = {
	.driver_features =
	    DRIVER_GEM |
	    DRIVER_RENDER | DRIVER_MODESET | DRIVER_ATOMIC | DRIVER_SYNCOBJ |
	    DRIVER_SYNCOBJ_TIMELINE,
	.release = i915_driver_release,
	.open = i915_driver_open,
	.lastclose = i915_driver_lastclose,
	.postclose = i915_driver_postclose,

	/* PRIME buffer sharing: generic handle<->fd, i915-specific import. */
	.prime_handle_to_fd = drm_gem_prime_handle_to_fd,
	.prime_fd_to_handle = drm_gem_prime_fd_to_handle,
	.gem_prime_import = i915_gem_prime_import,

	.dumb_create = i915_gem_dumb_create,
	.dumb_map_offset = i915_gem_dumb_mmap_offset,

	.ioctls = i915_ioctls,
	.num_ioctls = ARRAY_SIZE(i915_ioctls),
	.fops = &i915_driver_fops,
	.name = DRIVER_NAME,
	.desc = DRIVER_DESC,
	.date = DRIVER_DATE,
	.major = DRIVER_MAJOR,
	.minor = DRIVER_MINOR,
	.patchlevel = DRIVER_PATCHLEVEL,
};
1856