1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30#ifndef _I915_DRV_H_
31#define _I915_DRV_H_
32
33#include <uapi/drm/i915_drm.h>
34
35#include "i915_reg.h"
36#include "intel_bios.h"
37#include "intel_ringbuffer.h"
38#include <linux/io-mapping.h>
39#include <linux/i2c.h>
40#include <linux/i2c-algo-bit.h>
41#include <drm/intel-gtt.h>
42#include <linux/backlight.h>
43#include <linux/intel-iommu.h>
44#include <linux/kref.h>
45#include <linux/pm_qos.h>
46
47
48
49
/* Driver identification strings registered with the DRM core. */
#define DRIVER_AUTHOR "Tungsten Graphics, Inc."

#define DRIVER_NAME "i915"
#define DRIVER_DESC "Intel Graphics"
#define DRIVER_DATE "20080730"
55
/* Display pipes; INVALID_PIPE means "no pipe assigned yet". */
enum pipe {
 INVALID_PIPE = -1,
 PIPE_A = 0,
 PIPE_B,
 PIPE_C,
 I915_MAX_PIPES
};
/* Human-readable pipe letter ('A', 'B', ...) for debug messages. */
#define pipe_name(p) ((p) + 'A')

/* Display transcoders; the eDP transcoder sits at a fixed, sparse index. */
enum transcoder {
 TRANSCODER_A = 0,
 TRANSCODER_B,
 TRANSCODER_C,
 TRANSCODER_EDP = 0xF,
};
#define transcoder_name(t) ((t) + 'A')

/* Primary display planes. */
enum plane {
 PLANE_A = 0,
 PLANE_B,
 PLANE_C,
};
#define plane_name(p) ((p) + 'A')

/* Sprite letter; sprites are numbered consecutively across all pipes. */
#define sprite_name(p, s) ((p) * dev_priv->num_plane + (s) + 'A')

/* Digital display ports. */
enum port {
 PORT_A = 0,
 PORT_B,
 PORT_C,
 PORT_D,
 PORT_E,
 I915_MAX_PORTS
};
#define port_name(p) ((p) + 'A')
91
/*
 * Display power domains: logical consumers of display power wells.  A
 * domain is referenced while the corresponding hardware block needs the
 * well powered up.
 */
enum intel_display_power_domain {
 POWER_DOMAIN_PIPE_A,
 POWER_DOMAIN_PIPE_B,
 POWER_DOMAIN_PIPE_C,
 POWER_DOMAIN_PIPE_A_PANEL_FITTER,
 POWER_DOMAIN_PIPE_B_PANEL_FITTER,
 POWER_DOMAIN_PIPE_C_PANEL_FITTER,
 POWER_DOMAIN_TRANSCODER_A,
 POWER_DOMAIN_TRANSCODER_B,
 POWER_DOMAIN_TRANSCODER_C,
 POWER_DOMAIN_TRANSCODER_EDP,
 POWER_DOMAIN_VGA,
 POWER_DOMAIN_INIT,

 POWER_DOMAIN_NUM,
};

/* Bitmask covering all domains above. */
#define POWER_DOMAIN_MASK (BIT(POWER_DOMAIN_NUM) - 1)

/* Map a pipe/transcoder index to its power domain. */
#define POWER_DOMAIN_PIPE(pipe) ((pipe) + POWER_DOMAIN_PIPE_A)
#define POWER_DOMAIN_PIPE_PANEL_FITTER(pipe) \
 ((pipe) + POWER_DOMAIN_PIPE_A_PANEL_FITTER)
#define POWER_DOMAIN_TRANSCODER(tran) \
 ((tran) == TRANSCODER_EDP ? POWER_DOMAIN_TRANSCODER_EDP : \
 (tran) + POWER_DOMAIN_TRANSCODER_A)

/* Domains that are powered even when the HSW/BDW power well is off. */
#define HSW_ALWAYS_ON_POWER_DOMAINS ( \
 BIT(POWER_DOMAIN_PIPE_A) | \
 BIT(POWER_DOMAIN_TRANSCODER_EDP))
#define BDW_ALWAYS_ON_POWER_DOMAINS ( \
 BIT(POWER_DOMAIN_PIPE_A) | \
 BIT(POWER_DOMAIN_TRANSCODER_EDP) | \
 BIT(POWER_DOMAIN_PIPE_A_PANEL_FITTER))
125
/* Hotplug-detect pins; outputs with no usable HPD share HPD_NONE. */
enum hpd_pin {
 HPD_NONE = 0,
 HPD_PORT_A = HPD_NONE, /* internal panel port, never hotplugged */
 HPD_TV = HPD_NONE,
 HPD_CRT,
 HPD_SDVO_B,
 HPD_SDVO_C,
 HPD_PORT_B,
 HPD_PORT_C,
 HPD_PORT_D,
 HPD_NUM_PINS
};

/* All GEM domains that live on the GPU side (i.e. not the CPU domain). */
#define I915_GEM_GPU_DOMAINS \
 (I915_GEM_DOMAIN_RENDER | \
 I915_GEM_DOMAIN_SAMPLER | \
 I915_GEM_DOMAIN_COMMAND | \
 I915_GEM_DOMAIN_INSTRUCTION | \
 I915_GEM_DOMAIN_VERTEX)

/* Iterate over every pipe present on this device. */
#define for_each_pipe(p) for ((p) = 0; (p) < INTEL_INFO(dev)->num_pipes; (p)++)

/* Iterate over the encoders currently attached to @__crtc. */
#define for_each_encoder_on_crtc(dev, __crtc, intel_encoder) \
 list_for_each_entry((intel_encoder), &(dev)->mode_config.encoder_list, base.head) \
 if ((intel_encoder)->base.crtc == (__crtc))
151
struct drm_i915_private;

/* Shared (PCH) DPLL identifiers; DPLL_ID_PRIVATE means not shared. */
enum intel_dpll_id {
 DPLL_ID_PRIVATE = -1,

 DPLL_ID_PCH_PLL_A,
 DPLL_ID_PCH_PLL_B,
};
#define I915_NUM_PLLS 2

/* Register values describing a DPLL's programmed state. */
struct intel_dpll_hw_state {
 uint32_t dpll;
 uint32_t dpll_md;
 uint32_t fp0;
 uint32_t fp1;
};

/*
 * A DPLL that can be shared between CRTCs; refcount tracks users,
 * active tracks how many of those are currently enabled.
 */
struct intel_shared_dpll {
 int refcount;
 int active;
 bool on; /* is the PLL actually enabled in hardware? */
 const char *name;

 enum intel_dpll_id id;
 struct intel_dpll_hw_state hw_state;
 void (*mode_set)(struct drm_i915_private *dev_priv,
 struct intel_shared_dpll *pll);
 void (*enable)(struct drm_i915_private *dev_priv,
 struct intel_shared_dpll *pll);
 void (*disable)(struct drm_i915_private *dev_priv,
 struct intel_shared_dpll *pll);
 bool (*get_hw_state)(struct drm_i915_private *dev_priv,
 struct intel_shared_dpll *pll,
 struct intel_dpll_hw_state *hw_state);
};

/* Link M/N divider values used by DP and FDI link training. */
struct intel_link_m_n {
 uint32_t tu;
 uint32_t gmch_m;
 uint32_t gmch_n;
 uint32_t link_m;
 uint32_t link_n;
};

void intel_link_compute_m_n(int bpp, int nlanes,
 int pixel_clock, int link_clock,
 struct intel_link_m_n *m_n);

/* Refcounts for the HSW DDI PLLs (SPLL + two WRPLLs). */
struct intel_ddi_plls {
 int spll_refcount;
 int wrpll1_refcount;
 int wrpll2_refcount;
};
206
207
208
209
210
211
212
213
214
215
216
/* Interface version history is tracked in the driver changelog. */
#define DRIVER_MAJOR 1
#define DRIVER_MINOR 6
#define DRIVER_PATCHLEVEL 0

/* Extra GEM list/GTT sanity checking, compiled out by default. */
#define WATCH_LISTS 0
#define WATCH_GTT 0

/* Identifiers for the physically-contiguous objects some hw requires. */
#define I915_GEM_PHYS_CURSOR_0 1
#define I915_GEM_PHYS_CURSOR_1 2
#define I915_GEM_PHYS_OVERLAY_REGS 3
#define I915_MAX_PHYS_OBJECT (I915_GEM_PHYS_OVERLAY_REGS)

/* A physically-contiguous DMA allocation backing a GEM object. */
struct drm_i915_gem_phys_object {
 int id;
 struct page **page_list;
 drm_dma_handle_t *handle;
 struct drm_i915_gem_object *cur_obj; /* object currently bound to it */
};
235
struct opregion_header;
struct opregion_acpi;
struct opregion_swsci;
struct opregion_asle;

/* Mapped view of the ACPI OpRegion shared with system firmware. */
struct intel_opregion {
 struct opregion_header __iomem *header;
 struct opregion_acpi __iomem *acpi;
 struct opregion_swsci __iomem *swsci;
 u32 swsci_gbda_sub_functions; /* supported SWSCI sub-function bitmasks */
 u32 swsci_sbcb_sub_functions;
 struct opregion_asle __iomem *asle;
 void __iomem *vbt;
 u32 __iomem *lid_state;
 struct work_struct asle_work;
};
#define OPREGION_SIZE (8*1024)

struct intel_overlay;
struct intel_overlay_error_state;

/* Legacy (DRI1) per-master SAREA mapping. */
struct drm_i915_master_private {
 drm_local_map_t *sarea;
 struct _drm_i915_sarea *sarea_priv;
};
#define I915_FENCE_REG_NONE -1
#define I915_MAX_NUM_FENCES 32

/* Bit-field width needed to hold a fence register number (incl. NONE). */
#define I915_MAX_NUM_FENCE_BITS 6

/* A hardware fence register and the object it is currently assigned to. */
struct drm_i915_fence_reg {
 struct list_head lru_list;
 struct drm_i915_gem_object *obj;
 int pin_count; /* non-zero prevents stealing this fence */
};

/* SDVO output wiring parsed from the VBT. */
struct sdvo_device_mapping {
 u8 initialized;
 u8 dvo_port;
 u8 slave_addr;
 u8 dvo_wiring;
 u8 i2c_pin;
 u8 ddc_pin;
};
280
struct intel_display_error_state;

/*
 * Snapshot of GPU and ring state captured when a hang is detected,
 * kref-counted and dumped via debugfs/sysfs for post-mortem analysis.
 * Most arrays are indexed by ring id.
 */
struct drm_i915_error_state {
 struct kref ref;
 u32 eir;
 u32 pgtbl_er;
 u32 ier;
 u32 ccid;
 u32 derrmr;
 u32 forcewake;
 bool waiting[I915_NUM_RINGS];
 u32 pipestat[I915_MAX_PIPES];
 u32 tail[I915_NUM_RINGS];
 u32 head[I915_NUM_RINGS];
 u32 ctl[I915_NUM_RINGS];
 u32 ipeir[I915_NUM_RINGS];
 u32 ipehr[I915_NUM_RINGS];
 u32 instdone[I915_NUM_RINGS];
 u32 acthd[I915_NUM_RINGS];
 u32 semaphore_mboxes[I915_NUM_RINGS][I915_NUM_RINGS - 1];
 u32 semaphore_seqno[I915_NUM_RINGS][I915_NUM_RINGS - 1];
 u32 rc_psmi[I915_NUM_RINGS];

 u32 cpu_ring_head[I915_NUM_RINGS];
 u32 cpu_ring_tail[I915_NUM_RINGS];
 u32 error;
 u32 err_int;
 u32 bbstate[I915_NUM_RINGS];
 u32 instpm[I915_NUM_RINGS];
 u32 instps[I915_NUM_RINGS];
 u32 extra_instdone[I915_NUM_INSTDONE_REG];
 u32 seqno[I915_NUM_RINGS];
 u64 bbaddr;
 u32 fault_reg[I915_NUM_RINGS];
 u32 done_reg;
 u32 faddr[I915_NUM_RINGS];
 u64 fence[I915_MAX_NUM_FENCES];
 struct timeval time;
 /* Per-ring dumps: copies of the ringbuffer, batch and context pages. */
 struct drm_i915_error_ring {
  struct drm_i915_error_object {
   int page_count;
   u32 gtt_offset;
   u32 *pages[0];
  } *ringbuffer, *batchbuffer, *ctx;
  struct drm_i915_error_request {
   long jiffies;
   u32 seqno;
   u32 tail;
  } *requests;
  int num_requests;
 } ring[I915_NUM_RINGS];
 /* Compact per-object records of everything bound at hang time. */
 struct drm_i915_error_buffer {
  u32 size;
  u32 name;
  u32 rseqno, wseqno;
  u32 gtt_offset;
  u32 read_domains;
  u32 write_domain;
  s32 fence_reg:I915_MAX_NUM_FENCE_BITS;
  s32 pinned:2;
  u32 tiling:2;
  u32 dirty:1;
  u32 purgeable:1;
  s32 ring:4;
  u32 cache_level:3;
 } **active_bo, **pinned_bo;
 u32 *active_bo_count, *pinned_bo_count;
 struct intel_overlay_error_state *overlay;
 struct intel_display_error_state *display;
 int hangcheck_score[I915_NUM_RINGS];
 enum intel_ring_hangcheck_action hangcheck_action[I915_NUM_RINGS];
};
353
struct intel_crtc_config;
struct intel_crtc;
struct intel_limit;
struct dpll;

/*
 * Per-platform display vtable, filled in at driver load according to
 * the detected hardware generation.
 */
struct drm_i915_display_funcs {
 bool (*fbc_enabled)(struct drm_device *dev);
 void (*enable_fbc)(struct drm_crtc *crtc, unsigned long interval);
 void (*disable_fbc)(struct drm_device *dev);
 int (*get_display_clock_speed)(struct drm_device *dev);
 int (*get_fifo_size)(struct drm_device *dev, int plane);













 /* Find PLL divider values hitting @target given platform limits. */
 bool (*find_dpll)(const struct intel_limit *limit,
 struct drm_crtc *crtc,
 int target, int refclk,
 struct dpll *match_clock,
 struct dpll *best_clock);
 void (*update_wm)(struct drm_crtc *crtc);
 void (*update_sprite_wm)(struct drm_plane *plane,
 struct drm_crtc *crtc,
 uint32_t sprite_width, int pixel_size,
 bool enable, bool scaled);
 void (*modeset_global_resources)(struct drm_device *dev);

 /* Read out the current hw pipe config into @intel_crtc_config. */
 bool (*get_pipe_config)(struct intel_crtc *,
 struct intel_crtc_config *);
 int (*crtc_mode_set)(struct drm_crtc *crtc,
 int x, int y,
 struct drm_framebuffer *old_fb);
 void (*crtc_enable)(struct drm_crtc *crtc);
 void (*crtc_disable)(struct drm_crtc *crtc);
 void (*off)(struct drm_crtc *crtc);
 void (*write_eld)(struct drm_connector *connector,
 struct drm_crtc *crtc,
 struct drm_display_mode *mode);
 void (*fdi_link_train)(struct drm_crtc *crtc);
 void (*init_clock_gating)(struct drm_device *dev);
 int (*queue_flip)(struct drm_device *dev, struct drm_crtc *crtc,
 struct drm_framebuffer *fb,
 struct drm_i915_gem_object *obj,
 uint32_t flags);
 int (*update_plane)(struct drm_crtc *crtc, struct drm_framebuffer *fb,
 int x, int y);
 void (*hpd_irq_setup)(struct drm_device *dev);




};
417
/* Low-level MMIO accessors, specialised per platform/forcewake scheme. */
struct intel_uncore_funcs {
 void (*force_wake_get)(struct drm_i915_private *dev_priv);
 void (*force_wake_put)(struct drm_i915_private *dev_priv);

 uint8_t (*mmio_readb)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
 uint16_t (*mmio_readw)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
 uint32_t (*mmio_readl)(struct drm_i915_private *dev_priv, off_t offset, bool trace);
 uint64_t (*mmio_readq)(struct drm_i915_private *dev_priv, off_t offset, bool trace);

 void (*mmio_writeb)(struct drm_i915_private *dev_priv, off_t offset,
 uint8_t val, bool trace);
 void (*mmio_writew)(struct drm_i915_private *dev_priv, off_t offset,
 uint16_t val, bool trace);
 void (*mmio_writel)(struct drm_i915_private *dev_priv, off_t offset,
 uint32_t val, bool trace);
 void (*mmio_writeq)(struct drm_i915_private *dev_priv, off_t offset,
 uint64_t val, bool trace);
};

/* Register-access bookkeeping: forcewake reference counting and FIFO. */
struct intel_uncore {
 spinlock_t lock; /* protects the counts and accessor state below */

 struct intel_uncore_funcs funcs;

 unsigned fifo_count;
 unsigned forcewake_count;

 struct delayed_work force_wake_work; /* deferred forcewake release */
};
447
/*
 * X-macro listing every per-device feature flag; expanded both to
 * declare the bit-fields below and to print them for debugging.
 */
#define DEV_INFO_FOR_EACH_FLAG(func, sep) \
 func(is_mobile) sep \
 func(is_i85x) sep \
 func(is_i915g) sep \
 func(is_i945gm) sep \
 func(is_g33) sep \
 func(need_gfx_hws) sep \
 func(is_g4x) sep \
 func(is_pineview) sep \
 func(is_broadwater) sep \
 func(is_crestline) sep \
 func(is_ivybridge) sep \
 func(is_valleyview) sep \
 func(is_haswell) sep \
 func(is_preliminary) sep \
 func(has_fbc) sep \
 func(has_pipe_cxsr) sep \
 func(has_hotplug) sep \
 func(cursor_needs_physical) sep \
 func(has_overlay) sep \
 func(overlay_needs_physical) sep \
 func(supports_tv) sep \
 func(has_llc) sep \
 func(has_ddi) sep \
 func(has_fpga_dbg)

#define DEFINE_FLAG(name) u8 name:1
#define SEP_SEMICOLON ;

/* Static description of one device variant, selected by PCI id. */
struct intel_device_info {
 u32 display_mmio_offset;
 u8 num_pipes:3;
 u8 gen; /* hardware generation number */
 u8 ring_mask; /* which rings this device has */
 DEV_INFO_FOR_EACH_FLAG(DEFINE_FLAG, SEP_SEMICOLON);
};

#undef DEFINE_FLAG
#undef SEP_SEMICOLON

/* GPU caching modes selectable per object. */
enum i915_cache_level {
 I915_CACHE_NONE = 0,
 I915_CACHE_LLC,
 I915_CACHE_L3_LLC,



 I915_CACHE_WT, /* write-through, used for scanout on LLC-less parts */
};

/* A gen6+ GTT page-table entry. */
typedef uint32_t gen6_gtt_pte_t;
499
/*
 * A GPU virtual address space (the global GTT or a per-context PPGTT),
 * with its allocator, object lists and page-table operations.
 */
struct i915_address_space {
 struct drm_mm mm; /* range allocator for this address space */
 struct drm_device *dev;
 struct list_head global_link; /* link in dev_priv->vm_list */
 unsigned long start; /* start offset of usable range */
 size_t total; /* size of the full range in bytes */

 /* Scratch page mapped into every unused PTE. */
 struct {
  dma_addr_t addr;
  struct page *page;
 } scratch;









 /* Objects with pending GPU activity in this address space. */
 struct list_head active_list;










 /* Bound objects with no outstanding GPU work. */
 struct list_head inactive_list;

 /* Page-table manipulation hooks, specialised per generation. */
 gen6_gtt_pte_t (*pte_encode)(dma_addr_t addr,
 enum i915_cache_level level,
 bool valid);
 void (*clear_range)(struct i915_address_space *vm,
 unsigned int first_entry,
 unsigned int num_entries,
 bool use_scratch);
 void (*insert_entries)(struct i915_address_space *vm,
 struct sg_table *st,
 unsigned int first_entry,
 enum i915_cache_level cache_level);
 void (*cleanup)(struct i915_address_space *vm);
};
549
550
551
552
553
554
555
556
/* The global GTT: the base address space plus aperture bookkeeping. */
struct i915_gtt {
 struct i915_address_space base;
 size_t stolen_size; /* size of stolen memory */

 unsigned long mappable_end; /* end of the CPU-mappable aperture */
 struct io_mapping *mappable; /* CPU mapping of the aperture */
 phys_addr_t mappable_base; /* physical address of the aperture */

 /* CPU mapping of the GTT page-table entries themselves. */
 void __iomem *gsm;

 bool do_idle_maps;

 int mtrr;

 /* Probe hook filled in per platform; reports sizes and base. */
 int (*gtt_probe)(struct drm_device *dev, size_t *gtt_total,
 size_t *stolen, phys_addr_t *mappable_base,
 unsigned long *mappable_end);
};
#define gtt_total_entries(gtt) ((gtt).base.total >> PAGE_SHIFT)

/* A per-process (aliasing or full) GTT and its page-table pages. */
struct i915_hw_ppgtt {
 struct i915_address_space base;
 unsigned num_pd_entries;
 union {
  struct page **pt_pages; /* gen6/7 layout */
  struct page *gen8_pt_pages; /* gen8 layout */
 };
 struct page *pd_pages;
 int num_pd_pages;
 int num_pt_pages;
 union {
  uint32_t pd_offset;
  dma_addr_t pd_dma_addr[4];
 };
 union {
  dma_addr_t *pt_dma_addr;
  dma_addr_t *gen8_pt_dma_addr[4];
 };
 int (*enable)(struct drm_device *dev);
};
599
600
601
602
603
604
605
606
607
/*
 * A VMA: one binding of a GEM object into one address space.  An object
 * may have a VMA per address space it is bound into.
 */
struct i915_vma {
 struct drm_mm_node node; /* the allocated range in the vm */
 struct drm_i915_gem_object *obj;
 struct i915_address_space *vm;

 /* Link in the vm's active/inactive list. */
 struct list_head mm_list;
 /* Link in obj->vma_list. */
 struct list_head vma_link;

 /* Used by execbuffer while building the relocation list. */
 struct list_head exec_list;



 struct hlist_node exec_node;
 unsigned long exec_handle;
 struct drm_i915_gem_exec_object2 *exec_entry;

};

/* Per-context hang accounting used to ban repeatedly-guilty contexts. */
struct i915_ctx_hang_stats {
 /* Batches submitted but not yet complete at hang time. */
 unsigned batch_pending;

 /* Batches found actively running at hang time (the guilty ones). */
 unsigned batch_active;

 /* Timestamp of the last time this context was found guilty. */
 unsigned long guilty_ts;

 /* Set once the context has hung often enough to be banned. */
 bool banned;
};

/* The id reserved for the default (kernel-created) context. */
#define DEFAULT_CONTEXT_ID 0
struct i915_hw_context {
 struct kref ref;
 int id;
 bool is_initialized;
 uint8_t remap_slice; /* L3 remap slices still to be written */
 struct drm_i915_file_private *file_priv;
 struct intel_ring_buffer *ring;
 struct drm_i915_gem_object *obj; /* backing storage for hw context */
 struct i915_ctx_hang_stats hang_stats;

 struct list_head link; /* link in dev_priv->context_list */
};
658
/* Framebuffer-compression state and the reason it is (not) enabled. */
struct i915_fbc {
 unsigned long size;
 unsigned int fb_id;
 enum plane plane;
 int y;

 struct drm_mm_node *compressed_fb; /* stolen-mem compressed buffer */
 struct drm_mm_node *compressed_llb;

 /* Deferred enable: FBC is turned on after a vblank of idleness. */
 struct intel_fbc_work {
  struct delayed_work work;
  struct drm_crtc *crtc;
  struct drm_framebuffer *fb;
  int interval;
 } *fbc_work;

 enum no_fbc_reason {
  FBC_OK,
  FBC_UNSUPPORTED,
  FBC_NO_OUTPUT,
  FBC_STOLEN_TOO_SMALL,
  FBC_UNSUPPORTED_MODE,
  FBC_MODE_TOO_LARGE,
  FBC_BAD_PLANE,
  FBC_NOT_TILED,
  FBC_MULTIPLE_PIPES,
  FBC_MODULE_PARAM,
  FBC_CHIP_DEFAULT,
 } no_fbc_reason;
};

/* Panel self-refresh capability/status. */
struct i915_psr {
 bool sink_support; /* panel advertises PSR */
 bool source_ok; /* source-side conditions are met */
};

/* Which PCH (south display) chipset is attached, if any. */
enum intel_pch {
 PCH_NONE = 0,
 PCH_IBX, /* Ibexpeak */
 PCH_CPT, /* Cougarpoint */
 PCH_LPT, /* Lynxpoint */
 PCH_NOP,
};

enum intel_sbi_destination {
 SBI_ICLK,
 SBI_MPHY,
};

/* Per-machine quirk flags applied from a DMI/PCI-id table. */
#define QUIRK_PIPEA_FORCE (1<<0)
#define QUIRK_LVDS_SSC_DISABLE (1<<1)
#define QUIRK_INVERT_BRIGHTNESS (1<<2)
#define QUIRK_NO_PCH_PWM_ENABLE (1<<3)

struct intel_fbdev;
struct intel_fbc_work;

/* One GMBUS (hardware i2c) pin pair, with bit-banging fallback. */
struct intel_gmbus {
 struct i2c_adapter adapter;
 u32 force_bit; /* non-zero forces bit-banging mode */
 u32 reg0;
 u32 gpio_reg;
 struct i2c_algo_bit_data bit_algo;
 struct drm_i915_private *dev_priv;
};
724
/*
 * Register values saved on suspend and written back on resume.  Field
 * names mirror the hardware register names; the save/restore code in
 * i915_suspend.c walks these in order.
 */
struct i915_suspend_saved_registers {
 u8 saveLBB;
 u32 saveDSPACNTR;
 u32 saveDSPBCNTR;
 u32 saveDSPARB;
 u32 savePIPEACONF;
 u32 savePIPEBCONF;
 u32 savePIPEASRC;
 u32 savePIPEBSRC;
 u32 saveFPA0;
 u32 saveFPA1;
 u32 saveDPLL_A;
 u32 saveDPLL_A_MD;
 u32 saveHTOTAL_A;
 u32 saveHBLANK_A;
 u32 saveHSYNC_A;
 u32 saveVTOTAL_A;
 u32 saveVBLANK_A;
 u32 saveVSYNC_A;
 u32 saveBCLRPAT_A;
 u32 saveTRANSACONF;
 u32 saveTRANS_HTOTAL_A;
 u32 saveTRANS_HBLANK_A;
 u32 saveTRANS_HSYNC_A;
 u32 saveTRANS_VTOTAL_A;
 u32 saveTRANS_VBLANK_A;
 u32 saveTRANS_VSYNC_A;
 u32 savePIPEASTAT;
 u32 saveDSPASTRIDE;
 u32 saveDSPASIZE;
 u32 saveDSPAPOS;
 u32 saveDSPAADDR;
 u32 saveDSPASURF;
 u32 saveDSPATILEOFF;
 u32 savePFIT_PGM_RATIOS;
 u32 saveBLC_HIST_CTL;
 u32 saveBLC_PWM_CTL;
 u32 saveBLC_PWM_CTL2;
 u32 saveBLC_HIST_CTL_B;
 u32 saveBLC_PWM_CTL_B;
 u32 saveBLC_PWM_CTL2_B;
 u32 saveBLC_CPU_PWM_CTL;
 u32 saveBLC_CPU_PWM_CTL2;
 u32 saveFPB0;
 u32 saveFPB1;
 u32 saveDPLL_B;
 u32 saveDPLL_B_MD;
 u32 saveHTOTAL_B;
 u32 saveHBLANK_B;
 u32 saveHSYNC_B;
 u32 saveVTOTAL_B;
 u32 saveVBLANK_B;
 u32 saveVSYNC_B;
 u32 saveBCLRPAT_B;
 u32 saveTRANSBCONF;
 u32 saveTRANS_HTOTAL_B;
 u32 saveTRANS_HBLANK_B;
 u32 saveTRANS_HSYNC_B;
 u32 saveTRANS_VTOTAL_B;
 u32 saveTRANS_VBLANK_B;
 u32 saveTRANS_VSYNC_B;
 u32 savePIPEBSTAT;
 u32 saveDSPBSTRIDE;
 u32 saveDSPBSIZE;
 u32 saveDSPBPOS;
 u32 saveDSPBADDR;
 u32 saveDSPBSURF;
 u32 saveDSPBTILEOFF;
 u32 saveVGA0;
 u32 saveVGA1;
 u32 saveVGA_PD;
 u32 saveVGACNTRL;
 u32 saveADPA;
 u32 saveLVDS;
 u32 savePP_ON_DELAYS;
 u32 savePP_OFF_DELAYS;
 u32 saveDVOA;
 u32 saveDVOB;
 u32 saveDVOC;
 u32 savePP_ON;
 u32 savePP_OFF;
 u32 savePP_CONTROL;
 u32 savePP_DIVISOR;
 u32 savePFIT_CONTROL;
 u32 save_palette_a[256];
 u32 save_palette_b[256];
 u32 saveDPFC_CB_BASE;
 u32 saveFBC_CFB_BASE;
 u32 saveFBC_LL_BASE;
 u32 saveFBC_CONTROL;
 u32 saveFBC_CONTROL2;
 u32 saveIER;
 u32 saveIIR;
 u32 saveIMR;
 u32 saveDEIER;
 u32 saveDEIMR;
 u32 saveGTIER;
 u32 saveGTIMR;
 u32 saveFDI_RXA_IMR;
 u32 saveFDI_RXB_IMR;
 u32 saveCACHE_MODE_0;
 u32 saveMI_ARB_STATE;
 u32 saveSWF0[16];
 u32 saveSWF1[16];
 u32 saveSWF2[3];
 u8 saveMSR;
 u8 saveSR[8];
 u8 saveGR[25];
 u8 saveAR_INDEX;
 u8 saveAR[21];
 u8 saveDACMASK;
 u8 saveCR[37];
 uint64_t saveFENCE[I915_MAX_NUM_FENCES];
 u32 saveCURACNTR;
 u32 saveCURAPOS;
 u32 saveCURABASE;
 u32 saveCURBCNTR;
 u32 saveCURBPOS;
 u32 saveCURBBASE;
 u32 saveCURSIZE;
 u32 saveDP_B;
 u32 saveDP_C;
 u32 saveDP_D;
 u32 savePIPEA_GMCH_DATA_M;
 u32 savePIPEB_GMCH_DATA_M;
 u32 savePIPEA_GMCH_DATA_N;
 u32 savePIPEB_GMCH_DATA_N;
 u32 savePIPEA_DP_LINK_M;
 u32 savePIPEB_DP_LINK_M;
 u32 savePIPEA_DP_LINK_N;
 u32 savePIPEB_DP_LINK_N;
 u32 saveFDI_RXA_CTL;
 u32 saveFDI_TXA_CTL;
 u32 saveFDI_RXB_CTL;
 u32 saveFDI_TXB_CTL;
 u32 savePFA_CTL_1;
 u32 savePFB_CTL_1;
 u32 savePFA_WIN_SZ;
 u32 savePFB_WIN_SZ;
 u32 savePFA_WIN_POS;
 u32 savePFB_WIN_POS;
 u32 savePCH_DREF_CONTROL;
 u32 saveDISP_ARB_CTL;
 u32 savePIPEA_DATA_M1;
 u32 savePIPEA_DATA_N1;
 u32 savePIPEA_LINK_M1;
 u32 savePIPEA_LINK_N1;
 u32 savePIPEB_DATA_M1;
 u32 savePIPEB_DATA_N1;
 u32 savePIPEB_LINK_M1;
 u32 savePIPEB_LINK_N1;
 u32 saveMCHBAR_RENDER_STANDBY;
 u32 savePCH_PORT_HOTPLUG;
};
879
/* Gen6+ RPS (render p-state / turbo) state. */
struct intel_gen6_power_mgmt {
 /* Bottom half servicing PM interrupts. */
 struct work_struct work;
 u32 pm_iir;

 /* Frequency "delay" values, in hardware units. */
 u8 cur_delay;
 u8 min_delay;
 u8 max_delay;
 u8 rpe_delay; /* efficient frequency */
 u8 rp1_delay;
 u8 rp0_delay;
 u8 hw_max;

 int last_adj; /* last up/down step, for acceleration */
 enum { LOW_POWER, BETWEEN, HIGH_POWER } power;

 bool enabled;
 struct delayed_work delayed_resume_work;

 /*
  * Protects the RPS hardware state; taken in addition to (and after)
  * struct_mutex where both are needed.
  */
 struct mutex hw_lock;
};

/* Protects the ILK RPS state below; also used by intel_ips.c. */
extern spinlock_t mchdev_lock;

/* Ironlake RPS and power-meter state. */
struct intel_ilk_power_mgmt {
 u8 cur_delay;
 u8 min_delay;
 u8 max_delay;
 u8 fmax;
 u8 fstart;

 u64 last_count1;
 unsigned long last_time1;
 unsigned long chipset_power;
 u64 last_count2;
 struct timespec last_time2;
 unsigned long gfx_power;
 u8 corr;

 int c_m;
 int r_t;

 struct drm_i915_gem_object *pwrctx;
 struct drm_i915_gem_object *renderctx;
};

/* Refcount for one display power well. */
struct i915_power_well {
 /* Number of current users; the well is off only when this hits 0. */
 int count;
};

#define I915_MAX_POWER_WELLS 1

struct i915_power_domains {
 /*
  * Power wells needed for initialization at driver init and suspend
  * time are on.  They are run through domain states.
  */
 bool init_power_on;

 struct mutex lock; /* protects the power well counts */
 struct i915_power_well power_wells[I915_MAX_POWER_WELLS];
};
951
/* Legacy DRI1 state; kept only for old userspace. */
struct i915_dri1_state {
 unsigned allow_batchbuffer : 1;
 u32 __iomem *gfx_hws_cpu_addr;

 unsigned int cpp;
 int back_offset;
 int front_offset;
 int current_page;
 int page_flipping;

 uint32_t counter;
};

/* User-mode-setting leftovers. */
struct i915_ums_state {
 /*
  * Flag if the hardware appears to be wedged while in UMS mode;
  * non-zero suspends GEM activity.
  */
 int mm_suspended;
};

#define MAX_L3_SLICES 2
/* L3 cache remapping state for parity-error handling. */
struct intel_l3_parity {
 u32 *remap_info[MAX_L3_SLICES];
 struct work_struct error_work;
 int which_slice;
};

/* Top-level GEM memory-management state. */
struct i915_gem_mm {
 /* Allocator for the stolen-memory region. */
 struct drm_mm stolen;

 /* All objects with backing pages that are bound into some GTT. */
 struct list_head bound_list;

 /*
  * Objects with allocated pages but no GTT binding; these can be
  * shrunk under memory pressure.
  */
 struct list_head unbound_list;

 /* Base physical address of stolen memory. */
 unsigned long stolen_base;

 /* PPGTT used to alias the global GTT when full PPGTT is off. */
 struct i915_hw_ppgtt *aliasing_ppgtt;

 struct shrinker inactive_shrinker;
 bool shrinker_no_lock_stealing;

 /* LRU of fence registers. */
 struct list_head fence_list;

 /*
  * Deferred work to retire completed requests; rearmed while the
  * GPU stays busy.
  */
 struct delayed_work retire_work;

 /*
  * Deferred work run once the GPU has been idle long enough to
  * drop to the lowest frequency / power down.
  */
 struct delayed_work idle_work;

 /*
  * Whether waits may be interrupted by signals; cleared around
  * reset so waiters are not confused.
  */
 bool interruptible;

 /* Bit-6 swizzling settings for tiling on this platform. */
 uint32_t bit_6_swizzle_x;

 uint32_t bit_6_swizzle_y;

 /* Physically-contiguous objects (cursors/overlay regs). */
 struct drm_i915_gem_phys_object *phys_objs[I915_MAX_PHYS_OBJECT];

 /* Accounting, useful for userland debugging and debugfs. */
 spinlock_t object_stat_lock;
 size_t object_memory;
 u32 object_count;
};
1046
/* Growable buffer used while formatting an error state for userspace. */
struct drm_i915_error_state_buf {
 unsigned bytes; /* bytes written so far */
 unsigned size; /* capacity */
 int err; /* sticky error from a failed append */
 u8 *buf;
 loff_t start;
 loff_t pos;
};

/* Holds a reference to the error state while a debugfs file is open. */
struct i915_error_state_file_priv {
 struct drm_device *dev;
 struct drm_i915_error_state *error;
};

/* Hang detection, error capture and GPU reset bookkeeping. */
struct i915_gpu_error {
 /* Hangcheck fires every 1.5s to look for stuck rings. */
#define DRM_I915_HANGCHECK_PERIOD 1500
#define DRM_I915_HANGCHECK_JIFFIES msecs_to_jiffies(DRM_I915_HANGCHECK_PERIOD)
 /* A context is banned if it hangs again within this many seconds. */
#define DRM_I915_CTX_BAN_PERIOD DIV_ROUND_UP(8*DRM_I915_HANGCHECK_PERIOD, 1000)

 struct timer_list hangcheck_timer;

 /* Protects first_error. */
 spinlock_t lock;
 /* The first captured error state; kept until cleared by the user. */
 struct drm_i915_error_state *first_error;
 struct work_struct work; /* error capture + reset work */

 /* Rings whose IRQ we suspect is not firing (for missed-irq w/a). */
 unsigned long missed_irq_rings;

 /*
  * Reset state.  The low bit signals "reset in progress"; the upper
  * bits count completed resets.  The all-ones value means the GPU is
  * terminally wedged.  Waiters sample this before sleeping and bail
  * out when it changes.
  */
 atomic_t reset_counter;

#define I915_RESET_IN_PROGRESS_FLAG 1
#define I915_WEDGED 0xffffffff

 /*
  * Waitqueue for anyone waiting for a pending reset to complete
  * before retrying their operation.
  */
 wait_queue_head_t reset_queue;

 /* Debug knob: rings to pretend-hang (stop_rings debugfs file). */
 unsigned int stop_rings;

 /* Debug knob: rings whose user interrupt generation is faked. */
 unsigned int test_irq_rings;
};
1121
/* Whether/when the pre-suspend mode set should be restored. */
enum modeset_restore {
 MODESET_ON_LID_OPEN,
 MODESET_DONE,
 MODESET_SUSPENDED,
};

/* Per-DDI-port capabilities parsed from the VBT. */
struct ddi_vbt_port_info {
 uint8_t hdmi_level_shift;

 uint8_t supports_dvi:1;
 uint8_t supports_hdmi:1;
 uint8_t supports_dp:1;
};

/* Everything the driver consumes from the Video BIOS Tables. */
struct intel_vbt_data {
 struct drm_display_mode *lfp_lvds_vbt_mode; /* if any */
 struct drm_display_mode *sdvo_lvds_vbt_mode; /* if any */

 /* Feature bits from the BIOS. */
 unsigned int int_tv_support:1;
 unsigned int lvds_dither:1;
 unsigned int lvds_vbt:1;
 unsigned int int_crt_support:1;
 unsigned int lvds_use_ssc:1;
 unsigned int display_clock_mode:1;
 unsigned int fdi_rx_polarity_inverted:1;
 int lvds_ssc_freq;
 unsigned int bios_lvds_val; /* LVDS value written by the BIOS */

 /* eDP link parameters. */
 int edp_rate;
 int edp_lanes;
 int edp_preemphasis;
 int edp_vswing;
 bool edp_initialized;
 bool edp_support;
 int edp_bpp;
 struct edp_power_seq edp_pps;

 /* MIPI DSI panel info. */
 struct {
  u16 panel_id;
 } dsi;

 int crt_ddc_pin;

 int child_dev_num;
 union child_device_config *child_dev;

 struct ddi_vbt_port_info ddi_port_info[I915_MAX_PORTS];
};

/* How the display data buffer is split between planes (HSW). */
enum intel_ddb_partitioning {
 INTEL_DDB_PART_1_2, /* 50/50 split */
 INTEL_DDB_PART_5_6, /* 62.5/37.5 split */
};

/* Computed watermark values for one level. */
struct intel_wm_level {
 bool enable;
 uint32_t pri_val;
 uint32_t spr_val;
 uint32_t cur_val;
 uint32_t fbc_val;
};

/* Final register values programmed for HSW watermarks. */
struct hsw_wm_values {
 uint32_t wm_pipe[3];
 uint32_t wm_lp[3];
 uint32_t wm_lp_spr[3];
 uint32_t wm_linetime[3];
 bool enable_fbc_wm;
 enum intel_ddb_partitioning partitioning;
};
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
/*
 * Package C8 state: tracks the conditions required to let the whole
 * package enter its deepest idle state, and saves the interrupt
 * registers that are lost while in it.
 */
struct i915_package_c8 {
 bool requirements_met; /* display-side conditions satisfied */
 bool gpu_idle;
 bool irqs_disabled;
 /* Only true when the above allow it and nothing holds a reference. */
 bool enabled;
 int disable_count; /* references keeping PC8 disabled */
 struct mutex lock;
 struct delayed_work enable_work;

 /* Interrupt masks saved across PC8 entry/exit. */
 struct {
  uint32_t deimr;
  uint32_t sdeimr;
  uint32_t gtimr;
  uint32_t gtier;
  uint32_t gen6_pmimr;
 } regsave;
};

/* Selectable sources for the pipe CRC hardware. */
enum intel_pipe_crc_source {
 INTEL_PIPE_CRC_SOURCE_NONE,
 INTEL_PIPE_CRC_SOURCE_PLANE1,
 INTEL_PIPE_CRC_SOURCE_PLANE2,
 INTEL_PIPE_CRC_SOURCE_PF,
 INTEL_PIPE_CRC_SOURCE_PIPE,
 /* TV/DP CRCs are only available on pre-gen5/g4x hardware. */
 INTEL_PIPE_CRC_SOURCE_TV,
 INTEL_PIPE_CRC_SOURCE_DP_B,
 INTEL_PIPE_CRC_SOURCE_DP_C,
 INTEL_PIPE_CRC_SOURCE_DP_D,
 INTEL_PIPE_CRC_SOURCE_AUTO,
 INTEL_PIPE_CRC_SOURCE_MAX,
};

/* One captured CRC sample. */
struct intel_pipe_crc_entry {
 uint32_t frame;
 uint32_t crc[5];
};

#define INTEL_PIPE_CRC_ENTRIES_NR 128
/* Ring buffer of CRC samples exposed through debugfs. */
struct intel_pipe_crc {
 spinlock_t lock; /* protects entries and head/tail */
 bool opened; /* is the debugfs file open? */
 struct intel_pipe_crc_entry *entries;
 enum intel_pipe_crc_source source;
 int head, tail;
 wait_queue_head_t wq;
};
1294
/* The top-level per-device private structure. */
typedef struct drm_i915_private {
 struct drm_device *dev;
 struct kmem_cache *slab; /* slab cache for GEM objects */

 const struct intel_device_info *info;

 int relative_constants_mode;

 void __iomem *regs; /* MMIO register mapping */

 struct intel_uncore uncore;

 struct intel_gmbus gmbus[GMBUS_NUM_PORTS];

 /*
  * gmbus_mutex protects against concurrent usage of the single
  * hw gmbus controller on different i2c buses.
  */
 struct mutex gmbus_mutex;

 /* Base MMIO offset for the GPIO/GMBUS registers. */
 uint32_t gpio_mmio_base;

 wait_queue_head_t gmbus_wait_queue;

 struct pci_dev *bridge_dev;
 struct intel_ring_buffer ring[I915_NUM_RINGS];
 uint32_t last_seqno, next_seqno;

 drm_dma_handle_t *status_page_dmah;
 struct resource mch_res;

 atomic_t irq_received;

 /* Protects the irq masks below. */
 spinlock_t irq_lock;

 /* CPU DMA-latency QoS request held while GMBUS waits for an irq. */
 struct pm_qos_request pm_qos;

 /* DPIO indirect register protection. */
 struct mutex dpio_lock;

 /* Cached IRQ mask state. */
 union {
  u32 irq_mask;
  u32 de_irq_mask[I915_MAX_PIPES];
 };
 u32 gt_irq_mask;
 u32 pm_irq_mask;

 struct work_struct hotplug_work;
 bool enable_hotplug_processing;
 /* Per-pin hotplug storm detection state. */
 struct {
  unsigned long hpd_last_jiffies;
  int hpd_cnt;
  enum {
   HPD_ENABLED = 0,
   HPD_DISABLED = 1,
   HPD_MARK_DISABLED = 2
  } hpd_mark;
 } hpd_stats[HPD_NUM_PINS];
 u32 hpd_event_bits;
 struct timer_list hotplug_reenable_timer;

 int num_plane;

 struct i915_fbc fbc;
 struct intel_opregion opregion;
 struct intel_vbt_data vbt;

 /* Hardware overlay support. */
 struct intel_overlay *overlay;
 unsigned int sprite_scaling_enabled;

 /* Backlight state. */
 struct {
  int level;
  bool enabled;
  spinlock_t lock; /* protects the backlight registers */
  struct backlight_device *device;
 } backlight;

 /* LVDS info. */
 bool no_aux_handshake;

 struct drm_i915_fence_reg fence_regs[I915_MAX_NUM_FENCES];
 int fence_reg_start; /* 945 uses 945-specific regs before this */
 int num_fence_regs; /* 8 pre-965, 16 on 965+ */

 unsigned int fsb_freq, mem_freq, is_ddr3;

 /*
  * Workqueue used for all driver work items so that they can be
  * flushed/cancelled together at unload.
  */
 struct workqueue_struct *wq;

 /* Per-platform display function table. */
 struct drm_i915_display_funcs display;

 /* PCH chipset type. */
 enum intel_pch pch_type;
 unsigned short pch_id;

 unsigned long quirks;

 enum modeset_restore modeset_restore;
 struct mutex modeset_restore_lock;

 struct list_head vm_list; /* all address spaces on this device */
 struct i915_gtt gtt; /* the global GTT; vm_list head entry */

 struct i915_gem_mm mm;

 /* Kernel modesetting (KMS) state. */
 struct sdvo_device_mapping sdvo_mappings[2];

 struct drm_crtc *plane_to_crtc_mapping[3];
 struct drm_crtc *pipe_to_crtc_mapping[3];
 wait_queue_head_t pending_flip_queue;

#ifdef CONFIG_DEBUG_FS
 struct intel_pipe_crc pipe_crc[I915_MAX_PIPES];
#endif

 int num_shared_dpll;
 struct intel_shared_dpll shared_dplls[I915_NUM_PLLS];
 struct intel_ddi_plls ddi_plls;

 /* Reclocking support. */
 bool render_reclock_avail;
 bool lvds_downclock_avail;
 /* Indicates the reduced downclock mode for LVDS. */
 int lvds_downclock;
 u16 orig_clock;

 bool mchbar_need_disable;

 struct intel_l3_parity l3_parity;

 /* Size of eDRAM, zero if not present. */
 size_t ellc_size;

 /* gen6+ rps state. */
 struct intel_gen6_power_mgmt rps;

 /* ilk-only ips/rps state; protected by mchdev_lock, not struct_mutex. */
 struct intel_ilk_power_mgmt ips;

 struct i915_power_domains power_domains;

 struct i915_psr psr;

 struct i915_gpu_error gpu_error;

 struct drm_i915_gem_object *vlv_pctx;

#ifdef CONFIG_DRM_I915_FBDEV
 /* The fbdev emulation attached to this device, if any. */
 struct intel_fbdev *fbdev;
#endif

 /*
  * The console may sleep during resume; defer restoring it so we do
  * not block the resume path.
  */
 struct work_struct console_resume_work;

 struct drm_property *broadcast_rgb_property;
 struct drm_property *force_audio_property;

 bool hw_contexts_disabled;
 uint32_t hw_context_size;
 struct list_head context_list;

 u32 fdi_rx_config;

 struct i915_suspend_saved_registers regfile;

 /* Watermark state. */
 struct {
  /*
   * Raw watermark latency values, in 0.1us units, indexed by
   * watermark level.
   */
  uint16_t pri_latency[5];
  /* Sprite latencies. */
  uint16_t spr_latency[5];
  /* Cursor latencies. */
  uint16_t cur_latency[5];

  /* Currently-programmed hw watermark state. */
  struct hsw_wm_values hw;
 } wm;

 struct i915_package_c8 pc8;

 /* Old (DRI1) support infrastructure, beware the dragons below. */

 struct i915_dri1_state dri1;
 /* Old UMS support infrastructure, same warning applies. */
 struct i915_ums_state ums;
} drm_i915_private_t;
1506
1507static inline struct drm_i915_private *to_i915(const struct drm_device *dev)
1508{
1509 return dev->dev_private;
1510}
1511
1512
/* Iterate over the initialized rings, binding ring__ to each in turn. */
#define for_each_ring(ring__, dev_priv__, i__) \
 for ((i__) = 0; (i__) < I915_NUM_RINGS; (i__)++) \
 if (((ring__) = &(dev_priv__)->ring[(i__)]), intel_ring_initialized((ring__)))

/* User override for HDMI audio (force_audio connector property). */
enum hdmi_force_audio {
 HDMI_AUDIO_OFF_DVI = -2, /* no aux data for HDMI-DVI converter */
 HDMI_AUDIO_OFF, /* force turn off HDMI audio */
 HDMI_AUDIO_AUTO, /* trust EDID */
 HDMI_AUDIO_ON, /* force turn on HDMI audio */
};

/* Sentinel for "object has no GTT offset". */
#define I915_GTT_OFFSET_NONE ((u32)-1)
1525
/*
 * Backing-storage operations for a GEM object.
 *
 * get_pages populates obj->pages (a scatterlist of backing pages);
 * put_pages releases it.  Objects differ in where their pages come
 * from (shmem, stolen memory, dma-buf, ...), hence the indirection.
 */
struct drm_i915_gem_object_ops {
 int (*get_pages)(struct drm_i915_gem_object *);
 void (*put_pages)(struct drm_i915_gem_object *);
};
1543
/* An i915 GEM buffer object. */
struct drm_i915_gem_object {
 struct drm_gem_object base;

 const struct drm_i915_gem_object_ops *ops;

 /* List of VMAs: one per address space this object is bound in. */
 struct list_head vma_list;

 /* Stolen memory allocation for this object, if any. */
 struct drm_mm_node *stolen;
 struct list_head global_list; /* link in mm.bound/unbound_list */

 struct list_head ring_list; /* link in the ring's active list */
 /* Used in execbuf to temporarily hold a ref. */
 struct list_head obj_exec_link;

 /*
  * This is set if the object is on the active list (has pending
  * rendering and so a non-zero seqno).
  */
 unsigned int active:1;

 /*
  * This is set if the object has been written to since last bound
  * to the GTT.
  */
 unsigned int dirty:1;

 /*
  * Fence register bits (if any) for this object; will be set as
  * needed when mapped into the GTT.  I915_FENCE_REG_NONE if none.
  */
 signed int fence_reg:I915_MAX_NUM_FENCE_BITS;

 /*
  * Advice: are the backing pages purgeable?
  */
 unsigned int madv:2;

 /*
  * Current tiling mode for the object.
  */
 unsigned int tiling_mode:2;

 /*
  * Whether the tiling parameters for the currently associated fence
  * register have changed.  Note that for the purposes of tracking
  * tiling changes we also treat the unfenced register, the register
  * slot that the object occupies whilst it executes a fenced
  * command (such as BLT on gen2/3), as a "fence".
  */
 unsigned int fence_dirty:1;

 /*
  * How many users have pinned this object in GTT space.
  */
 unsigned int pin_count:4;
#define DRM_I915_GEM_OBJECT_MAX_PIN_COUNT 0xf

 /*
  * Is the object at the current location in the gtt mappable and
  * fenceable?
  */
 unsigned int map_and_fenceable:1;

 /*
  * Whether the current gtt mapping needs to be mappable (and isn't
  * just mappable by accident).
  */
 unsigned int fault_mappable:1;
 unsigned int pin_mappable:1;
 unsigned int pin_display:1;

 /*
  * Is the GPU currently using a fence to access this buffer?
  */
 unsigned int pending_fenced_gpu_access:1;
 unsigned int fenced_gpu_access:1;

 unsigned int cache_level:3;

 unsigned int has_aliasing_ppgtt_mapping:1;
 unsigned int has_global_gtt_mapping:1;
 unsigned int has_dma_mapping:1;

 struct sg_table *pages; /* backing pages; valid while pinned */
 int pages_pin_count;

 /* dma-buf vmap bookkeeping. */
 void *dma_buf_vmapping;
 int vmapping_count;

 struct intel_ring_buffer *ring;

 /* Breadcrumbs of last rendering to this buffer. */
 uint32_t last_read_seqno;
 uint32_t last_write_seqno;
 /* Breadcrumb of last fenced GPU access to the buffer. */
 uint32_t last_fenced_seqno;

 /* Current tiling stride for the object, if it's tiled. */
 uint32_t stride;

 /* References from framebuffers, locks out tiling changes. */
 unsigned long framebuffer_references;

 /* Record of the bit-17 of each physical page, for swizzling. */
 unsigned long *bit_17;

 /* User-space pin bookkeeping. */
 unsigned long user_pin_count;
 struct drm_file *pin_filp;

 /* For physically-backed objects (cursors etc.). */
 struct drm_i915_gem_phys_object *phys_obj;
};
#define to_gem_object(obj) (&((struct drm_i915_gem_object *)(obj))->base)
1668#define to_gem_object(obj) (&((struct drm_i915_gem_object *)(obj))->base)
1669
1670#define to_intel_bo(x) container_of(x, struct drm_i915_gem_object, base)
1671
1672
1673
1674
1675
1676
1677
1678
1679
1680
1681
/**
 * struct drm_i915_gem_request - a tracked command submission.
 *
 * Records where work was emitted into a ring and the seqno that will
 * signal its completion, so retirement can reclaim objects afterwards.
 */
struct drm_i915_gem_request {
	/* Ring this request was submitted on. */
	struct intel_ring_buffer *ring;

	/* Sequence number signalling completion of this request. */
	uint32_t seqno;

	/* Ring head position when the request was emitted — NOTE(review):
	 * head/tail presumably feed hang diagnosis/retirement; confirm. */
	u32 head;

	/* Ring tail position after the request was emitted. */
	u32 tail;

	/* Hardware context the request executed in, if any. */
	struct i915_hw_context *ctx;

	/* Batch buffer object executed by this request, if known. */
	struct drm_i915_gem_object *batch_obj;

	/* Time (jiffies) at which the request was emitted. */
	unsigned long emitted_jiffies;

	/* Link in the ring's request list. */
	struct list_head list;

	/* File that submitted the request. */
	struct drm_i915_file_private *file_priv;

	/* Link in file_priv->mm.request_list. */
	struct list_head client_list;
};
1711
/* Per-open-file driver state. */
struct drm_i915_file_private {
	struct drm_i915_private *dev_priv;

	struct {
		spinlock_t lock;		/* presumably protects request_list */
		struct list_head request_list;	/* requests from this client */
		struct delayed_work idle_work;
	} mm;
	struct idr context_idr;		/* this client's hw contexts, by id */

	struct i915_ctx_hang_stats hang_stats;
	atomic_t rps_wait_boost;
};
1725
1726#define INTEL_INFO(dev) (to_i915(dev)->info)
1727
1728#define IS_I830(dev) ((dev)->pdev->device == 0x3577)
1729#define IS_845G(dev) ((dev)->pdev->device == 0x2562)
1730#define IS_I85X(dev) (INTEL_INFO(dev)->is_i85x)
1731#define IS_I865G(dev) ((dev)->pdev->device == 0x2572)
1732#define IS_I915G(dev) (INTEL_INFO(dev)->is_i915g)
1733#define IS_I915GM(dev) ((dev)->pdev->device == 0x2592)
1734#define IS_I945G(dev) ((dev)->pdev->device == 0x2772)
1735#define IS_I945GM(dev) (INTEL_INFO(dev)->is_i945gm)
1736#define IS_BROADWATER(dev) (INTEL_INFO(dev)->is_broadwater)
1737#define IS_CRESTLINE(dev) (INTEL_INFO(dev)->is_crestline)
1738#define IS_GM45(dev) ((dev)->pdev->device == 0x2A42)
1739#define IS_G4X(dev) (INTEL_INFO(dev)->is_g4x)
1740#define IS_PINEVIEW_G(dev) ((dev)->pdev->device == 0xa001)
1741#define IS_PINEVIEW_M(dev) ((dev)->pdev->device == 0xa011)
1742#define IS_PINEVIEW(dev) (INTEL_INFO(dev)->is_pineview)
1743#define IS_G33(dev) (INTEL_INFO(dev)->is_g33)
1744#define IS_IRONLAKE_M(dev) ((dev)->pdev->device == 0x0046)
1745#define IS_IVYBRIDGE(dev) (INTEL_INFO(dev)->is_ivybridge)
1746#define IS_IVB_GT1(dev) ((dev)->pdev->device == 0x0156 || \
1747 (dev)->pdev->device == 0x0152 || \
1748 (dev)->pdev->device == 0x015a)
1749#define IS_SNB_GT1(dev) ((dev)->pdev->device == 0x0102 || \
1750 (dev)->pdev->device == 0x0106 || \
1751 (dev)->pdev->device == 0x010A)
1752#define IS_VALLEYVIEW(dev) (INTEL_INFO(dev)->is_valleyview)
1753#define IS_HASWELL(dev) (INTEL_INFO(dev)->is_haswell)
1754#define IS_BROADWELL(dev) (INTEL_INFO(dev)->gen == 8)
1755#define IS_MOBILE(dev) (INTEL_INFO(dev)->is_mobile)
1756#define IS_HSW_EARLY_SDV(dev) (IS_HASWELL(dev) && \
1757 ((dev)->pdev->device & 0xFF00) == 0x0C00)
1758#define IS_BDW_ULT(dev) (IS_BROADWELL(dev) && \
1759 (((dev)->pdev->device & 0xf) == 0x2 || \
1760 ((dev)->pdev->device & 0xf) == 0x6 || \
1761 ((dev)->pdev->device & 0xf) == 0xe))
1762#define IS_HSW_ULT(dev) (IS_HASWELL(dev) && \
1763 ((dev)->pdev->device & 0xFF00) == 0x0A00)
1764#define IS_ULT(dev) (IS_HSW_ULT(dev) || IS_BDW_ULT(dev))
1765#define IS_HSW_GT3(dev) (IS_HASWELL(dev) && \
1766 ((dev)->pdev->device & 0x00F0) == 0x0020)
1767#define IS_PRELIMINARY_HW(intel_info) ((intel_info)->is_preliminary)
1768
1769
1770
1771
1772
1773
1774
1775#define IS_GEN2(dev) (INTEL_INFO(dev)->gen == 2)
1776#define IS_GEN3(dev) (INTEL_INFO(dev)->gen == 3)
1777#define IS_GEN4(dev) (INTEL_INFO(dev)->gen == 4)
1778#define IS_GEN5(dev) (INTEL_INFO(dev)->gen == 5)
1779#define IS_GEN6(dev) (INTEL_INFO(dev)->gen == 6)
1780#define IS_GEN7(dev) (INTEL_INFO(dev)->gen == 7)
1781#define IS_GEN8(dev) (INTEL_INFO(dev)->gen == 8)
1782
1783#define RENDER_RING (1<<RCS)
1784#define BSD_RING (1<<VCS)
1785#define BLT_RING (1<<BCS)
1786#define VEBOX_RING (1<<VECS)
1787#define HAS_BSD(dev) (INTEL_INFO(dev)->ring_mask & BSD_RING)
1788#define HAS_BLT(dev) (INTEL_INFO(dev)->ring_mask & BLT_RING)
1789#define HAS_VEBOX(dev) (INTEL_INFO(dev)->ring_mask & VEBOX_RING)
1790#define HAS_LLC(dev) (INTEL_INFO(dev)->has_llc)
1791#define HAS_WT(dev) (IS_HASWELL(dev) && to_i915(dev)->ellc_size)
1792#define I915_NEED_GFX_HWS(dev) (INTEL_INFO(dev)->need_gfx_hws)
1793
1794#define HAS_HW_CONTEXTS(dev) (INTEL_INFO(dev)->gen >= 6)
1795#define HAS_ALIASING_PPGTT(dev) (INTEL_INFO(dev)->gen >=6 && !IS_VALLEYVIEW(dev))
1796
1797#define HAS_OVERLAY(dev) (INTEL_INFO(dev)->has_overlay)
1798#define OVERLAY_NEEDS_PHYSICAL(dev) (INTEL_INFO(dev)->overlay_needs_physical)
1799
1800
1801#define HAS_BROKEN_CS_TLB(dev) (IS_I830(dev) || IS_845G(dev))
1802
1803
1804
1805
1806#define HAS_128_BYTE_Y_TILING(dev) (!IS_GEN2(dev) && !(IS_I915G(dev) || \
1807 IS_I915GM(dev)))
1808#define SUPPORTS_DIGITAL_OUTPUTS(dev) (!IS_GEN2(dev) && !IS_PINEVIEW(dev))
1809#define SUPPORTS_INTEGRATED_HDMI(dev) (IS_G4X(dev) || IS_GEN5(dev))
1810#define SUPPORTS_INTEGRATED_DP(dev) (IS_G4X(dev) || IS_GEN5(dev))
1811#define SUPPORTS_TV(dev) (INTEL_INFO(dev)->supports_tv)
1812#define I915_HAS_HOTPLUG(dev) (INTEL_INFO(dev)->has_hotplug)
1813
1814#define HAS_FW_BLC(dev) (INTEL_INFO(dev)->gen > 2)
1815#define HAS_PIPE_CXSR(dev) (INTEL_INFO(dev)->has_pipe_cxsr)
1816#define I915_HAS_FBC(dev) (INTEL_INFO(dev)->has_fbc)
1817
1818#define HAS_IPS(dev) (IS_ULT(dev) || IS_BROADWELL(dev))
1819
1820#define HAS_DDI(dev) (INTEL_INFO(dev)->has_ddi)
1821#define HAS_POWER_WELL(dev) (IS_HASWELL(dev) || IS_BROADWELL(dev))
1822#define HAS_FPGA_DBG_UNCLAIMED(dev) (INTEL_INFO(dev)->has_fpga_dbg)
1823#define HAS_PSR(dev) (IS_HASWELL(dev) || IS_BROADWELL(dev))
1824#define HAS_PC8(dev) (IS_HASWELL(dev))
1825
1826#define INTEL_PCH_DEVICE_ID_MASK 0xff00
1827#define INTEL_PCH_IBX_DEVICE_ID_TYPE 0x3b00
1828#define INTEL_PCH_CPT_DEVICE_ID_TYPE 0x1c00
1829#define INTEL_PCH_PPT_DEVICE_ID_TYPE 0x1e00
1830#define INTEL_PCH_LPT_DEVICE_ID_TYPE 0x8c00
1831#define INTEL_PCH_LPT_LP_DEVICE_ID_TYPE 0x9c00
1832
1833#define INTEL_PCH_TYPE(dev) (to_i915(dev)->pch_type)
1834#define HAS_PCH_LPT(dev) (INTEL_PCH_TYPE(dev) == PCH_LPT)
1835#define HAS_PCH_CPT(dev) (INTEL_PCH_TYPE(dev) == PCH_CPT)
1836#define HAS_PCH_IBX(dev) (INTEL_PCH_TYPE(dev) == PCH_IBX)
1837#define HAS_PCH_NOP(dev) (INTEL_PCH_TYPE(dev) == PCH_NOP)
1838#define HAS_PCH_SPLIT(dev) (INTEL_PCH_TYPE(dev) != PCH_NONE)
1839
1840
1841#define HAS_L3_DPF(dev) (IS_IVYBRIDGE(dev) || IS_HASWELL(dev))
1842#define NUM_L3_SLICES(dev) (IS_HSW_GT3(dev) ? 2 : HAS_L3_DPF(dev))
1843
1844#define GT_FREQUENCY_MULTIPLIER 50
1845
1846#include "i915_trace.h"
1847
1848extern const struct drm_ioctl_desc i915_ioctls[];
1849extern int i915_max_ioctl;
1850extern unsigned int i915_fbpercrtc __always_unused;
1851extern int i915_panel_ignore_lid __read_mostly;
1852extern unsigned int i915_powersave __read_mostly;
1853extern int i915_semaphores __read_mostly;
1854extern unsigned int i915_lvds_downclock __read_mostly;
1855extern int i915_lvds_channel_mode __read_mostly;
1856extern int i915_panel_use_ssc __read_mostly;
1857extern int i915_vbt_sdvo_panel_type __read_mostly;
1858extern int i915_enable_rc6 __read_mostly;
1859extern int i915_enable_fbc __read_mostly;
1860extern bool i915_enable_hangcheck __read_mostly;
1861extern int i915_enable_ppgtt __read_mostly;
1862extern int i915_enable_psr __read_mostly;
1863extern unsigned int i915_preliminary_hw_support __read_mostly;
1864extern int i915_disable_power_well __read_mostly;
1865extern int i915_enable_ips __read_mostly;
1866extern bool i915_fastboot __read_mostly;
1867extern int i915_enable_pc8 __read_mostly;
1868extern int i915_pc8_timeout __read_mostly;
1869extern bool i915_prefault_disable __read_mostly;
1870
1871extern int i915_suspend(struct drm_device *dev, pm_message_t state);
1872extern int i915_resume(struct drm_device *dev);
1873extern int i915_master_create(struct drm_device *dev, struct drm_master *master);
1874extern void i915_master_destroy(struct drm_device *dev, struct drm_master *master);
1875
1876
1877void i915_update_dri1_breadcrumb(struct drm_device *dev);
1878extern void i915_kernel_lost_context(struct drm_device * dev);
1879extern int i915_driver_load(struct drm_device *, unsigned long flags);
1880extern int i915_driver_unload(struct drm_device *);
1881extern int i915_driver_open(struct drm_device *dev, struct drm_file *file_priv);
1882extern void i915_driver_lastclose(struct drm_device * dev);
1883extern void i915_driver_preclose(struct drm_device *dev,
1884 struct drm_file *file_priv);
1885extern void i915_driver_postclose(struct drm_device *dev,
1886 struct drm_file *file_priv);
1887extern int i915_driver_device_is_agp(struct drm_device * dev);
1888#ifdef CONFIG_COMPAT
1889extern long i915_compat_ioctl(struct file *filp, unsigned int cmd,
1890 unsigned long arg);
1891#endif
1892extern int i915_emit_box(struct drm_device *dev,
1893 struct drm_clip_rect *box,
1894 int DR1, int DR4);
1895extern int intel_gpu_reset(struct drm_device *dev);
1896extern int i915_reset(struct drm_device *dev);
1897extern unsigned long i915_chipset_val(struct drm_i915_private *dev_priv);
1898extern unsigned long i915_mch_val(struct drm_i915_private *dev_priv);
1899extern unsigned long i915_gfx_val(struct drm_i915_private *dev_priv);
1900extern void i915_update_gfx_val(struct drm_i915_private *dev_priv);
1901
1902extern void intel_console_resume(struct work_struct *work);
1903
1904
1905void i915_queue_hangcheck(struct drm_device *dev);
1906void i915_handle_error(struct drm_device *dev, bool wedged);
1907
1908extern void intel_irq_init(struct drm_device *dev);
1909extern void intel_hpd_init(struct drm_device *dev);
1910
1911extern void intel_uncore_sanitize(struct drm_device *dev);
1912extern void intel_uncore_early_sanitize(struct drm_device *dev);
1913extern void intel_uncore_init(struct drm_device *dev);
1914extern void intel_uncore_clear_errors(struct drm_device *dev);
1915extern void intel_uncore_check_errors(struct drm_device *dev);
1916extern void intel_uncore_fini(struct drm_device *dev);
1917
1918void
1919i915_enable_pipestat(drm_i915_private_t *dev_priv, enum pipe pipe, u32 mask);
1920
1921void
1922i915_disable_pipestat(drm_i915_private_t *dev_priv, enum pipe pipe, u32 mask);
1923
1924
1925int i915_gem_init_ioctl(struct drm_device *dev, void *data,
1926 struct drm_file *file_priv);
1927int i915_gem_create_ioctl(struct drm_device *dev, void *data,
1928 struct drm_file *file_priv);
1929int i915_gem_pread_ioctl(struct drm_device *dev, void *data,
1930 struct drm_file *file_priv);
1931int i915_gem_pwrite_ioctl(struct drm_device *dev, void *data,
1932 struct drm_file *file_priv);
1933int i915_gem_mmap_ioctl(struct drm_device *dev, void *data,
1934 struct drm_file *file_priv);
1935int i915_gem_mmap_gtt_ioctl(struct drm_device *dev, void *data,
1936 struct drm_file *file_priv);
1937int i915_gem_set_domain_ioctl(struct drm_device *dev, void *data,
1938 struct drm_file *file_priv);
1939int i915_gem_sw_finish_ioctl(struct drm_device *dev, void *data,
1940 struct drm_file *file_priv);
1941int i915_gem_execbuffer(struct drm_device *dev, void *data,
1942 struct drm_file *file_priv);
1943int i915_gem_execbuffer2(struct drm_device *dev, void *data,
1944 struct drm_file *file_priv);
1945int i915_gem_pin_ioctl(struct drm_device *dev, void *data,
1946 struct drm_file *file_priv);
1947int i915_gem_unpin_ioctl(struct drm_device *dev, void *data,
1948 struct drm_file *file_priv);
1949int i915_gem_busy_ioctl(struct drm_device *dev, void *data,
1950 struct drm_file *file_priv);
1951int i915_gem_get_caching_ioctl(struct drm_device *dev, void *data,
1952 struct drm_file *file);
1953int i915_gem_set_caching_ioctl(struct drm_device *dev, void *data,
1954 struct drm_file *file);
1955int i915_gem_throttle_ioctl(struct drm_device *dev, void *data,
1956 struct drm_file *file_priv);
1957int i915_gem_madvise_ioctl(struct drm_device *dev, void *data,
1958 struct drm_file *file_priv);
1959int i915_gem_entervt_ioctl(struct drm_device *dev, void *data,
1960 struct drm_file *file_priv);
1961int i915_gem_leavevt_ioctl(struct drm_device *dev, void *data,
1962 struct drm_file *file_priv);
1963int i915_gem_set_tiling(struct drm_device *dev, void *data,
1964 struct drm_file *file_priv);
1965int i915_gem_get_tiling(struct drm_device *dev, void *data,
1966 struct drm_file *file_priv);
1967int i915_gem_get_aperture_ioctl(struct drm_device *dev, void *data,
1968 struct drm_file *file_priv);
1969int i915_gem_wait_ioctl(struct drm_device *dev, void *data,
1970 struct drm_file *file_priv);
1971void i915_gem_load(struct drm_device *dev);
1972void *i915_gem_object_alloc(struct drm_device *dev);
1973void i915_gem_object_free(struct drm_i915_gem_object *obj);
1974void i915_gem_object_init(struct drm_i915_gem_object *obj,
1975 const struct drm_i915_gem_object_ops *ops);
1976struct drm_i915_gem_object *i915_gem_alloc_object(struct drm_device *dev,
1977 size_t size);
1978void i915_gem_free_object(struct drm_gem_object *obj);
1979void i915_gem_vma_destroy(struct i915_vma *vma);
1980
1981int __must_check i915_gem_object_pin(struct drm_i915_gem_object *obj,
1982 struct i915_address_space *vm,
1983 uint32_t alignment,
1984 bool map_and_fenceable,
1985 bool nonblocking);
1986void i915_gem_object_unpin(struct drm_i915_gem_object *obj);
1987int __must_check i915_vma_unbind(struct i915_vma *vma);
1988int __must_check i915_gem_object_ggtt_unbind(struct drm_i915_gem_object *obj);
1989int i915_gem_object_put_pages(struct drm_i915_gem_object *obj);
1990void i915_gem_release_mmap(struct drm_i915_gem_object *obj);
1991void i915_gem_lastclose(struct drm_device *dev);
1992
1993int __must_check i915_gem_object_get_pages(struct drm_i915_gem_object *obj);
/* Return the backing page at page index @n of @obj, or NULL if @n is out
 * of range.  obj->pages must already be populated (see
 * i915_gem_object_get_pages).  Walks the sg list from the start on every
 * call, so each lookup is O(n). */
static inline struct page *i915_gem_object_get_page(struct drm_i915_gem_object *obj, int n)
{
	struct sg_page_iter sg_iter;

	/* for_each_sg_page() starts iterating at page offset @n; we only
	 * want that first page, hence the immediate return. */
	for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, n)
		return sg_page_iter_page(&sg_iter);

	return NULL;
}
/* Take an extra pin on the object's backing pages so they cannot be
 * released while in use.  The pages must already be present. */
static inline void i915_gem_object_pin_pages(struct drm_i915_gem_object *obj)
{
	BUG_ON(obj->pages == NULL);
	obj->pages_pin_count++;
}
/* Drop a pin taken by i915_gem_object_pin_pages(). */
static inline void i915_gem_object_unpin_pages(struct drm_i915_gem_object *obj)
{
	BUG_ON(obj->pages_pin_count == 0);
	obj->pages_pin_count--;
}
2013
2014int __must_check i915_mutex_lock_interruptible(struct drm_device *dev);
2015int i915_gem_object_sync(struct drm_i915_gem_object *obj,
2016 struct intel_ring_buffer *to);
2017void i915_vma_move_to_active(struct i915_vma *vma,
2018 struct intel_ring_buffer *ring);
2019int i915_gem_dumb_create(struct drm_file *file_priv,
2020 struct drm_device *dev,
2021 struct drm_mode_create_dumb *args);
2022int i915_gem_mmap_gtt(struct drm_file *file_priv, struct drm_device *dev,
2023 uint32_t handle, uint64_t *offset);
2024
2025
2026
/**
 * i915_seqno_passed - has seqno @seq1 reached (or passed) @seq2?
 *
 * Seqnos are 32-bit counters that wrap around; the comparison is done on
 * the signed difference, which stays correct across wraparound as long
 * as the two values are less than 2^31 apart.
 */
static inline bool
i915_seqno_passed(uint32_t seq1, uint32_t seq2)
{
	int32_t delta = (int32_t)(seq1 - seq2);

	return delta >= 0;
}
2032
2033int __must_check i915_gem_get_seqno(struct drm_device *dev, u32 *seqno);
2034int __must_check i915_gem_set_seqno(struct drm_device *dev, u32 seqno);
2035int __must_check i915_gem_object_get_fence(struct drm_i915_gem_object *obj);
2036int __must_check i915_gem_object_put_fence(struct drm_i915_gem_object *obj);
2037
2038static inline bool
2039i915_gem_object_pin_fence(struct drm_i915_gem_object *obj)
2040{
2041 if (obj->fence_reg != I915_FENCE_REG_NONE) {
2042 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
2043 dev_priv->fence_regs[obj->fence_reg].pin_count++;
2044 return true;
2045 } else
2046 return false;
2047}
2048
2049static inline void
2050i915_gem_object_unpin_fence(struct drm_i915_gem_object *obj)
2051{
2052 if (obj->fence_reg != I915_FENCE_REG_NONE) {
2053 struct drm_i915_private *dev_priv = obj->base.dev->dev_private;
2054 WARN_ON(dev_priv->fence_regs[obj->fence_reg].pin_count <= 0);
2055 dev_priv->fence_regs[obj->fence_reg].pin_count--;
2056 }
2057}
2058
2059bool i915_gem_retire_requests(struct drm_device *dev);
2060void i915_gem_retire_requests_ring(struct intel_ring_buffer *ring);
2061int __must_check i915_gem_check_wedge(struct i915_gpu_error *error,
2062 bool interruptible);
2063static inline bool i915_reset_in_progress(struct i915_gpu_error *error)
2064{
2065 return unlikely(atomic_read(&error->reset_counter)
2066 & I915_RESET_IN_PROGRESS_FLAG);
2067}
2068
2069static inline bool i915_terminally_wedged(struct i915_gpu_error *error)
2070{
2071 return atomic_read(&error->reset_counter) == I915_WEDGED;
2072}
2073
2074void i915_gem_reset(struct drm_device *dev);
2075bool i915_gem_clflush_object(struct drm_i915_gem_object *obj, bool force);
2076int __must_check i915_gem_object_finish_gpu(struct drm_i915_gem_object *obj);
2077int __must_check i915_gem_init(struct drm_device *dev);
2078int __must_check i915_gem_init_hw(struct drm_device *dev);
2079int i915_gem_l3_remap(struct intel_ring_buffer *ring, int slice);
2080void i915_gem_init_swizzling(struct drm_device *dev);
2081void i915_gem_cleanup_ringbuffer(struct drm_device *dev);
2082int __must_check i915_gpu_idle(struct drm_device *dev);
2083int __must_check i915_gem_suspend(struct drm_device *dev);
2084int __i915_add_request(struct intel_ring_buffer *ring,
2085 struct drm_file *file,
2086 struct drm_i915_gem_object *batch_obj,
2087 u32 *seqno);
2088#define i915_add_request(ring, seqno) \
2089 __i915_add_request(ring, NULL, NULL, seqno)
2090int __must_check i915_wait_seqno(struct intel_ring_buffer *ring,
2091 uint32_t seqno);
2092int i915_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf);
2093int __must_check
2094i915_gem_object_set_to_gtt_domain(struct drm_i915_gem_object *obj,
2095 bool write);
2096int __must_check
2097i915_gem_object_set_to_cpu_domain(struct drm_i915_gem_object *obj, bool write);
2098int __must_check
2099i915_gem_object_pin_to_display_plane(struct drm_i915_gem_object *obj,
2100 u32 alignment,
2101 struct intel_ring_buffer *pipelined);
2102void i915_gem_object_unpin_from_display_plane(struct drm_i915_gem_object *obj);
2103int i915_gem_attach_phys_object(struct drm_device *dev,
2104 struct drm_i915_gem_object *obj,
2105 int id,
2106 int align);
2107void i915_gem_detach_phys_object(struct drm_device *dev,
2108 struct drm_i915_gem_object *obj);
2109void i915_gem_free_all_phys_object(struct drm_device *dev);
2110int i915_gem_open(struct drm_device *dev, struct drm_file *file);
2111void i915_gem_release(struct drm_device *dev, struct drm_file *file);
2112
2113uint32_t
2114i915_gem_get_gtt_size(struct drm_device *dev, uint32_t size, int tiling_mode);
2115uint32_t
2116i915_gem_get_gtt_alignment(struct drm_device *dev, uint32_t size,
2117 int tiling_mode, bool fenced);
2118
2119int i915_gem_object_set_cache_level(struct drm_i915_gem_object *obj,
2120 enum i915_cache_level cache_level);
2121
2122struct drm_gem_object *i915_gem_prime_import(struct drm_device *dev,
2123 struct dma_buf *dma_buf);
2124
2125struct dma_buf *i915_gem_prime_export(struct drm_device *dev,
2126 struct drm_gem_object *gem_obj, int flags);
2127
2128void i915_gem_restore_fences(struct drm_device *dev);
2129
2130unsigned long i915_gem_obj_offset(struct drm_i915_gem_object *o,
2131 struct i915_address_space *vm);
2132bool i915_gem_obj_bound_any(struct drm_i915_gem_object *o);
2133bool i915_gem_obj_bound(struct drm_i915_gem_object *o,
2134 struct i915_address_space *vm);
2135unsigned long i915_gem_obj_size(struct drm_i915_gem_object *o,
2136 struct i915_address_space *vm);
2137struct i915_vma *i915_gem_obj_to_vma(struct drm_i915_gem_object *obj,
2138 struct i915_address_space *vm);
2139struct i915_vma *
2140i915_gem_obj_lookup_or_create_vma(struct drm_i915_gem_object *obj,
2141 struct i915_address_space *vm);
2142
2143struct i915_vma *i915_gem_obj_to_ggtt(struct drm_i915_gem_object *obj);
2144
2145
/* Shorthand: the global GTT address space of the device owning @obj. */
#define obj_to_ggtt(obj) \
	(&((struct drm_i915_private *)(obj)->base.dev->dev_private)->gtt.base)
2148static inline bool i915_is_ggtt(struct i915_address_space *vm)
2149{
2150 struct i915_address_space *ggtt =
2151 &((struct drm_i915_private *)(vm)->dev->dev_private)->gtt.base;
2152 return vm == ggtt;
2153}
2154
/* True if @obj has a binding in the global GTT. */
static inline bool i915_gem_obj_ggtt_bound(struct drm_i915_gem_object *obj)
{
	return i915_gem_obj_bound(obj, obj_to_ggtt(obj));
}
2159
/* Offset of @obj's binding within the global GTT. */
static inline unsigned long
i915_gem_obj_ggtt_offset(struct drm_i915_gem_object *obj)
{
	return i915_gem_obj_offset(obj, obj_to_ggtt(obj));
}
2165
/* Size of @obj's binding within the global GTT. */
static inline unsigned long
i915_gem_obj_ggtt_size(struct drm_i915_gem_object *obj)
{
	return i915_gem_obj_size(obj, obj_to_ggtt(obj));
}
2171
/* Pin @obj into the global GTT: convenience wrapper around
 * i915_gem_object_pin() using the device's GGTT address space. */
static inline int __must_check
i915_gem_obj_ggtt_pin(struct drm_i915_gem_object *obj,
		      uint32_t alignment,
		      bool map_and_fenceable,
		      bool nonblocking)
{
	return i915_gem_object_pin(obj, obj_to_ggtt(obj), alignment,
				   map_and_fenceable, nonblocking);
}
2181
2182
2183void i915_gem_context_init(struct drm_device *dev);
2184void i915_gem_context_fini(struct drm_device *dev);
2185void i915_gem_context_close(struct drm_device *dev, struct drm_file *file);
2186int i915_switch_context(struct intel_ring_buffer *ring,
2187 struct drm_file *file, int to_id);
2188void i915_gem_context_free(struct kref *ctx_ref);
/* Take a reference on a hardware context; paired with
 * i915_gem_context_unreference(). */
static inline void i915_gem_context_reference(struct i915_hw_context *ctx)
{
	kref_get(&ctx->ref);
}
2193
/* Drop a context reference; the last put frees the context via
 * i915_gem_context_free(). */
static inline void i915_gem_context_unreference(struct i915_hw_context *ctx)
{
	kref_put(&ctx->ref, i915_gem_context_free);
}
2198
2199struct i915_ctx_hang_stats * __must_check
2200i915_gem_context_get_hang_stats(struct drm_device *dev,
2201 struct drm_file *file,
2202 u32 id);
2203int i915_gem_context_create_ioctl(struct drm_device *dev, void *data,
2204 struct drm_file *file);
2205int i915_gem_context_destroy_ioctl(struct drm_device *dev, void *data,
2206 struct drm_file *file);
2207
2208
2209void i915_gem_cleanup_aliasing_ppgtt(struct drm_device *dev);
2210void i915_ppgtt_bind_object(struct i915_hw_ppgtt *ppgtt,
2211 struct drm_i915_gem_object *obj,
2212 enum i915_cache_level cache_level);
2213void i915_ppgtt_unbind_object(struct i915_hw_ppgtt *ppgtt,
2214 struct drm_i915_gem_object *obj);
2215
2216void i915_check_and_clear_faults(struct drm_device *dev);
2217void i915_gem_suspend_gtt_mappings(struct drm_device *dev);
2218void i915_gem_restore_gtt_mappings(struct drm_device *dev);
2219int __must_check i915_gem_gtt_prepare_object(struct drm_i915_gem_object *obj);
2220void i915_gem_gtt_bind_object(struct drm_i915_gem_object *obj,
2221 enum i915_cache_level cache_level);
2222void i915_gem_gtt_unbind_object(struct drm_i915_gem_object *obj);
2223void i915_gem_gtt_finish_object(struct drm_i915_gem_object *obj);
2224void i915_gem_init_global_gtt(struct drm_device *dev);
2225void i915_gem_setup_global_gtt(struct drm_device *dev, unsigned long start,
2226 unsigned long mappable_end, unsigned long end);
2227int i915_gem_gtt_init(struct drm_device *dev);
2228static inline void i915_gem_chipset_flush(struct drm_device *dev)
2229{
2230 if (INTEL_INFO(dev)->gen < 6)
2231 intel_gtt_chipset_flush();
2232}
2233
2234
2235
2236int __must_check i915_gem_evict_something(struct drm_device *dev,
2237 struct i915_address_space *vm,
2238 int min_size,
2239 unsigned alignment,
2240 unsigned cache_level,
2241 bool mappable,
2242 bool nonblock);
2243int i915_gem_evict_vm(struct i915_address_space *vm, bool do_idle);
2244int i915_gem_evict_everything(struct drm_device *dev);
2245
2246
2247int i915_gem_init_stolen(struct drm_device *dev);
2248int i915_gem_stolen_setup_compression(struct drm_device *dev, int size);
2249void i915_gem_stolen_cleanup_compression(struct drm_device *dev);
2250void i915_gem_cleanup_stolen(struct drm_device *dev);
2251struct drm_i915_gem_object *
2252i915_gem_object_create_stolen(struct drm_device *dev, u32 size);
2253struct drm_i915_gem_object *
2254i915_gem_object_create_stolen_for_preallocated(struct drm_device *dev,
2255 u32 stolen_offset,
2256 u32 gtt_offset,
2257 u32 size);
2258void i915_gem_object_release_stolen(struct drm_i915_gem_object *obj);
2259
2260
2261static inline bool i915_gem_object_needs_bit17_swizzle(struct drm_i915_gem_object *obj)
2262{
2263 drm_i915_private_t *dev_priv = obj->base.dev->dev_private;
2264
2265 return dev_priv->mm.bit_6_swizzle_x == I915_BIT_6_SWIZZLE_9_10_17 &&
2266 obj->tiling_mode != I915_TILING_NONE;
2267}
2268
2269void i915_gem_detect_bit_6_swizzle(struct drm_device *dev);
2270void i915_gem_object_do_bit_17_swizzle(struct drm_i915_gem_object *obj);
2271void i915_gem_object_save_bit_17_swizzle(struct drm_i915_gem_object *obj);
2272
2273
2274#if WATCH_LISTS
2275int i915_verify_lists(struct drm_device *dev);
2276#else
2277#define i915_verify_lists(dev) 0
2278#endif
2279
2280
2281int i915_debugfs_init(struct drm_minor *minor);
2282void i915_debugfs_cleanup(struct drm_minor *minor);
2283#ifdef CONFIG_DEBUG_FS
2284void intel_display_crc_init(struct drm_device *dev);
2285#else
2286static inline void intel_display_crc_init(struct drm_device *dev) {}
2287#endif
2288
2289
2290__printf(2, 3)
2291void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
2292int i915_error_state_to_str(struct drm_i915_error_state_buf *estr,
2293 const struct i915_error_state_file_priv *error);
2294int i915_error_state_buf_init(struct drm_i915_error_state_buf *eb,
2295 size_t count, loff_t pos);
/* Free the buffer backing an error-state string builder. */
static inline void i915_error_state_buf_release(
	struct drm_i915_error_state_buf *eb)
{
	kfree(eb->buf);
}
2301void i915_capture_error_state(struct drm_device *dev);
2302void i915_error_state_get(struct drm_device *dev,
2303 struct i915_error_state_file_priv *error_priv);
2304void i915_error_state_put(struct i915_error_state_file_priv *error_priv);
2305void i915_destroy_error_state(struct drm_device *dev);
2306
2307void i915_get_extra_instdone(struct drm_device *dev, uint32_t *instdone);
2308const char *i915_cache_level_str(int type);
2309
2310
2311extern int i915_save_state(struct drm_device *dev);
2312extern int i915_restore_state(struct drm_device *dev);
2313
2314
2315void i915_save_display_reg(struct drm_device *dev);
2316void i915_restore_display_reg(struct drm_device *dev);
2317
2318
2319void i915_setup_sysfs(struct drm_device *dev_priv);
2320void i915_teardown_sysfs(struct drm_device *dev_priv);
2321
2322
2323extern int intel_setup_gmbus(struct drm_device *dev);
2324extern void intel_teardown_gmbus(struct drm_device *dev);
/* Check that @port is one of the GMBUS ports this driver knows about
 * (GMBUS_PORT_SSC .. GMBUS_PORT_DPD). */
static inline bool intel_gmbus_is_port_valid(unsigned port)
{
	return (port >= GMBUS_PORT_SSC && port <= GMBUS_PORT_DPD);
}
2329
2330extern struct i2c_adapter *intel_gmbus_get_adapter(
2331 struct drm_i915_private *dev_priv, unsigned port);
2332extern void intel_gmbus_set_speed(struct i2c_adapter *adapter, int speed);
2333extern void intel_gmbus_force_bit(struct i2c_adapter *adapter, bool force_bit);
/* True if this GMBUS adapter has been switched to bit-banging mode
 * (see intel_gmbus_force_bit()). */
static inline bool intel_gmbus_is_forced_bit(struct i2c_adapter *adapter)
{
	return container_of(adapter, struct intel_gmbus, adapter)->force_bit;
}
2338extern void intel_i2c_reset(struct drm_device *dev);
2339
2340
2341struct intel_encoder;
2342extern int intel_opregion_setup(struct drm_device *dev);
2343#ifdef CONFIG_ACPI
2344extern void intel_opregion_init(struct drm_device *dev);
2345extern void intel_opregion_fini(struct drm_device *dev);
2346extern void intel_opregion_asle_intr(struct drm_device *dev);
2347extern int intel_opregion_notify_encoder(struct intel_encoder *intel_encoder,
2348 bool enable);
2349extern int intel_opregion_notify_adapter(struct drm_device *dev,
2350 pci_power_t state);
2351#else
/* !CONFIG_ACPI: opregion support is compiled out; all hooks are no-ops
 * and the notify helpers report success. */
static inline void intel_opregion_init(struct drm_device *dev) { return; }
static inline void intel_opregion_fini(struct drm_device *dev) { return; }
static inline void intel_opregion_asle_intr(struct drm_device *dev) { return; }
static inline int
intel_opregion_notify_encoder(struct intel_encoder *intel_encoder, bool enable)
{
	return 0;
}
static inline int
intel_opregion_notify_adapter(struct drm_device *dev, pci_power_t state)
{
	return 0;
}
2365#endif
2366
2367
2368#ifdef CONFIG_ACPI
2369extern void intel_register_dsm_handler(void);
2370extern void intel_unregister_dsm_handler(void);
2371#else
/* !CONFIG_ACPI: the ACPI _DSM handler is not built; stubs do nothing. */
static inline void intel_register_dsm_handler(void) { return; }
static inline void intel_unregister_dsm_handler(void) { return; }
2374#endif
2375
2376
2377extern void intel_modeset_init_hw(struct drm_device *dev);
2378extern void intel_modeset_suspend_hw(struct drm_device *dev);
2379extern void intel_modeset_init(struct drm_device *dev);
2380extern void intel_modeset_gem_init(struct drm_device *dev);
2381extern void intel_modeset_cleanup(struct drm_device *dev);
2382extern int intel_modeset_vga_set_state(struct drm_device *dev, bool state);
2383extern void intel_modeset_setup_hw_state(struct drm_device *dev,
2384 bool force_restore);
2385extern void i915_redisable_vga(struct drm_device *dev);
2386extern bool intel_fbc_enabled(struct drm_device *dev);
2387extern void intel_disable_fbc(struct drm_device *dev);
2388extern bool ironlake_set_drps(struct drm_device *dev, u8 val);
2389extern void intel_init_pch_refclk(struct drm_device *dev);
2390extern void gen6_set_rps(struct drm_device *dev, u8 val);
2391extern void valleyview_set_rps(struct drm_device *dev, u8 val);
2392extern int valleyview_rps_max_freq(struct drm_i915_private *dev_priv);
2393extern int valleyview_rps_min_freq(struct drm_i915_private *dev_priv);
2394extern void intel_detect_pch(struct drm_device *dev);
2395extern int intel_trans_dp_port_sel(struct drm_crtc *crtc);
2396extern int intel_enable_rc6(const struct drm_device *dev);
2397
2398extern bool i915_semaphore_is_enabled(struct drm_device *dev);
2399int i915_reg_read_ioctl(struct drm_device *dev, void *data,
2400 struct drm_file *file);
2401
2402
2403extern struct intel_overlay_error_state *intel_overlay_capture_error_state(struct drm_device *dev);
2404extern void intel_overlay_print_error_state(struct drm_i915_error_state_buf *e,
2405 struct intel_overlay_error_state *error);
2406
2407extern struct intel_display_error_state *intel_display_capture_error_state(struct drm_device *dev);
2408extern void intel_display_print_error_state(struct drm_i915_error_state_buf *e,
2409 struct drm_device *dev,
2410 struct intel_display_error_state *error);
2411
2412
2413
2414
2415
/* GT forcewake reference get/put: keeps the GT well powered-up so MMIO
 * reaches the hardware (Gen6+). */
void gen6_gt_force_wake_get(struct drm_i915_private *dev_priv);
void gen6_gt_force_wake_put(struct drm_i915_private *dev_priv);

/* Mailbox-based communication with the PCODE firmware (Sandybridge+).
 * Return 0 on success or a negative error code. */
int sandybridge_pcode_read(struct drm_i915_private *dev_priv, u8 mbox, u32 *val);
int sandybridge_pcode_write(struct drm_i915_private *dev_priv, u8 mbox, u32 val);

/* Valleyview IOSF sideband accessors, one pair per sideband unit
 * (punit, NC, GPIO NC, CCK, CCU, GPS core, DPIO per pipe). */
u32 vlv_punit_read(struct drm_i915_private *dev_priv, u8 addr);
void vlv_punit_write(struct drm_i915_private *dev_priv, u8 addr, u32 val);
u32 vlv_nc_read(struct drm_i915_private *dev_priv, u8 addr);
u32 vlv_gpio_nc_read(struct drm_i915_private *dev_priv, u32 reg);
void vlv_gpio_nc_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
u32 vlv_cck_read(struct drm_i915_private *dev_priv, u32 reg);
void vlv_cck_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
u32 vlv_ccu_read(struct drm_i915_private *dev_priv, u32 reg);
void vlv_ccu_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
u32 vlv_gps_core_read(struct drm_i915_private *dev_priv, u32 reg);
void vlv_gps_core_write(struct drm_i915_private *dev_priv, u32 reg, u32 val);
u32 vlv_dpio_read(struct drm_i915_private *dev_priv, enum pipe pipe, int reg);
void vlv_dpio_write(struct drm_i915_private *dev_priv, enum pipe pipe, int reg, u32 val);
/* SBI (sideband interface) accessors; destination selects the target unit. */
u32 intel_sbi_read(struct drm_i915_private *dev_priv, u16 reg,
		   enum intel_sbi_destination destination);
void intel_sbi_write(struct drm_i915_private *dev_priv, u16 reg, u32 value,
		     enum intel_sbi_destination destination);

/* Convert between VLV opcode values and GPU frequency, scaled by DDR freq. */
int vlv_gpu_freq(int ddr_freq, int val);
int vlv_freq_opcode(int ddr_freq, int val);
2443
/*
 * MMIO register accessors.
 *
 * NOTE: all of these expect a local variable named dev_priv
 * (struct drm_i915_private *) to be in scope at the call site; they
 * dispatch through the per-platform uncore.funcs vtable.  The trailing
 * boolean argument selects tracing: the _NOTRACE variants pass false.
 */
#define I915_READ8(reg) dev_priv->uncore.funcs.mmio_readb(dev_priv, (reg), true)
#define I915_WRITE8(reg, val) dev_priv->uncore.funcs.mmio_writeb(dev_priv, (reg), (val), true)

#define I915_READ16(reg) dev_priv->uncore.funcs.mmio_readw(dev_priv, (reg), true)
#define I915_WRITE16(reg, val) dev_priv->uncore.funcs.mmio_writew(dev_priv, (reg), (val), true)
#define I915_READ16_NOTRACE(reg) dev_priv->uncore.funcs.mmio_readw(dev_priv, (reg), false)
#define I915_WRITE16_NOTRACE(reg, val) dev_priv->uncore.funcs.mmio_writew(dev_priv, (reg), (val), false)

#define I915_READ(reg) dev_priv->uncore.funcs.mmio_readl(dev_priv, (reg), true)
#define I915_WRITE(reg, val) dev_priv->uncore.funcs.mmio_writel(dev_priv, (reg), (val), true)
#define I915_READ_NOTRACE(reg) dev_priv->uncore.funcs.mmio_readl(dev_priv, (reg), false)
#define I915_WRITE_NOTRACE(reg, val) dev_priv->uncore.funcs.mmio_writel(dev_priv, (reg), (val), false)

#define I915_WRITE64(reg, val) dev_priv->uncore.funcs.mmio_writeq(dev_priv, (reg), (val), true)
#define I915_READ64(reg) dev_priv->uncore.funcs.mmio_readq(dev_priv, (reg), true)

/* Read back a register to flush preceding posted writes; the value is
 * deliberately discarded (cast to void) and the read is untraced. */
#define POSTING_READ(reg) (void)I915_READ_NOTRACE(reg)
#define POSTING_READ16(reg) (void)I915_READ16_NOTRACE(reg)
2462
2463
/* Values for the "Broadcast RGB" connector property — presumably selecting
 * automatic vs. forced full/limited RGB quantization range; confirm against
 * the property-handling code. */
#define INTEL_BROADCAST_RGB_AUTO 0
#define INTEL_BROADCAST_RGB_FULL 1
#define INTEL_BROADCAST_RGB_LIMITED 2
2467
2468static inline uint32_t i915_vgacntrl_reg(struct drm_device *dev)
2469{
2470 if (HAS_PCH_SPLIT(dev))
2471 return CPU_VGACNTRL;
2472 else if (IS_VALLEYVIEW(dev))
2473 return VLV_VGACNTRL;
2474 else
2475 return VGACNTRL;
2476}
2477
2478static inline void __user *to_user_ptr(u64 address)
2479{
2480 return (void __user *)(uintptr_t)address;
2481}
2482
2483static inline unsigned long msecs_to_jiffies_timeout(const unsigned int m)
2484{
2485 unsigned long j = msecs_to_jiffies(m);
2486
2487 return min_t(unsigned long, MAX_JIFFY_OFFSET, j + 1);
2488}
2489
2490static inline unsigned long
2491timespec_to_jiffies_timeout(const struct timespec *value)
2492{
2493 unsigned long j = timespec_to_jiffies(value);
2494
2495 return min_t(unsigned long, MAX_JIFFY_OFFSET, j + 1);
2496}
2497
2498#endif
2499