1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
#include <linux/errno.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_mipi_dsi.h>

#include "intel_atomic.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_dsi.h"
#include "intel_panel.h"
37
38static inline int header_credits_available(struct drm_i915_private *dev_priv,
39 enum transcoder dsi_trans)
40{
41 return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
42 >> FREE_HEADER_CREDIT_SHIFT;
43}
44
45static inline int payload_credits_available(struct drm_i915_private *dev_priv,
46 enum transcoder dsi_trans)
47{
48 return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
49 >> FREE_PLOAD_CREDIT_SHIFT;
50}
51
/*
 * Busy-wait (up to 100 us) until the HW has released all header credits,
 * i.e. every queued packet header has been sent out.
 */
static void wait_for_header_credits(struct drm_i915_private *dev_priv,
				    enum transcoder dsi_trans)
{
	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
			MAX_HEADER_CREDIT, 100))
		DRM_ERROR("DSI header credits not released\n");
}
59
/*
 * Busy-wait (up to 100 us) until the HW has released all payload credits,
 * i.e. the command payload FIFO has fully drained.
 */
static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
				     enum transcoder dsi_trans)
{
	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
			MAX_PLOAD_CREDIT, 100))
		DRM_ERROR("DSI payload credits not released\n");
}
67
68static enum transcoder dsi_port_to_transcoder(enum port port)
69{
70 if (port == PORT_A)
71 return TRANSCODER_DSI_0;
72 else
73 return TRANSCODER_DSI_1;
74}
75
/*
 * Wait until all command-mode traffic queued for the panel has actually been
 * dispatched: all credits released, a DCS NOP acknowledged on each port, and
 * the LP TX engine idle.
 */
static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct mipi_dsi_device *dsi;
	enum port port;
	enum transcoder dsi_trans;
	int ret;

	/* wait for header/payload credits to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
		wait_for_payload_credits(dev_priv, dsi_trans);
	}

	/* send a NOP DCS command in LP mode so we know the queue drained */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi = intel_dsi->dsi_hosts[port]->device;
		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
		dsi->channel = 0;
		ret = mipi_dsi_dcs_nop(dsi);
		if (ret < 0)
			DRM_ERROR("error sending DCS NOP command\n");
	}

	/* wait for header credits of the NOP to be released */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		wait_for_header_credits(dev_priv, dsi_trans);
	}

	/* wait for the LP TX in progress bit to be cleared */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
				  LPTX_IN_PROGRESS), 20))
			DRM_ERROR("LPTX bit not cleared\n");
	}
}
116
117static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data,
118 u32 len)
119{
120 struct intel_dsi *intel_dsi = host->intel_dsi;
121 struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
122 enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
123 int free_credits;
124 int i, j;
125
126 for (i = 0; i < len; i += 4) {
127 u32 tmp = 0;
128
129 free_credits = payload_credits_available(dev_priv, dsi_trans);
130 if (free_credits < 1) {
131 DRM_ERROR("Payload credit not available\n");
132 return false;
133 }
134
135 for (j = 0; j < min_t(u32, len - i, 4); j++)
136 tmp |= *data++ << 8 * j;
137
138 I915_WRITE(DSI_CMD_TXPYLD(dsi_trans), tmp);
139 }
140
141 return true;
142}
143
144static int dsi_send_pkt_hdr(struct intel_dsi_host *host,
145 struct mipi_dsi_packet pkt, bool enable_lpdt)
146{
147 struct intel_dsi *intel_dsi = host->intel_dsi;
148 struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
149 enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
150 u32 tmp;
151 int free_credits;
152
153
154 free_credits = header_credits_available(dev_priv, dsi_trans);
155 if (free_credits < 1) {
156 DRM_ERROR("send pkt header failed, not enough hdr credits\n");
157 return -1;
158 }
159
160 tmp = I915_READ(DSI_CMD_TXHDR(dsi_trans));
161
162 if (pkt.payload)
163 tmp |= PAYLOAD_PRESENT;
164 else
165 tmp &= ~PAYLOAD_PRESENT;
166
167 tmp &= ~VBLANK_FENCE;
168
169 if (enable_lpdt)
170 tmp |= LP_DATA_TRANSFER;
171
172 tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK);
173 tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT);
174 tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT);
175 tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT);
176 tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT);
177 I915_WRITE(DSI_CMD_TXHDR(dsi_trans), tmp);
178
179 return 0;
180}
181
182static int dsi_send_pkt_payld(struct intel_dsi_host *host,
183 struct mipi_dsi_packet pkt)
184{
185
186 if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) {
187 DRM_ERROR("payload size exceeds max queue limit\n");
188 return -1;
189 }
190
191
192 if (!add_payld_to_queue(host, pkt.payload,
193 pkt.payload_length)) {
194 DRM_ERROR("adding payload to queue failed\n");
195 return -1;
196 }
197
198 return 0;
199}
200
/*
 * Program combo PHY voltage swing and de-emphasis for the DSI ports.
 * The register write order follows the HW programming sequence; values
 * (scaling mode, termination, swing, cursor coefficients) appear to be
 * fixed per-platform constants — confirm against Bspec tables.
 */
static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	for_each_dsi_port(port, intel_dsi->ports) {
		/*
		 * Program PORT_TX_DW5: read lane 0, broadcast the updated
		 * value to the whole lane group via the _GRP register.
		 */
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);

		/* same settings for the AUX lane */
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
		tmp |= SCALING_MODE_SEL(0x2);
		tmp |= TAP2_DISABLE | TAP3_DISABLE;
		tmp |= RTERM_SELECT(0x6);
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);

		/* PORT_TX_DW2: swing select and resistor compensation */
		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);

		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
			 RCOMP_SCALAR_MASK);
		tmp |= SWING_SEL_UPPER(0x2);
		tmp |= SWING_SEL_LOWER(0x2);
		tmp |= RCOMP_SCALAR(0x98);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);

		/* PORT_TX_DW4: cursor/post-cursor coefficients (AUX lane) */
		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		tmp |= POST_CURSOR_1(0x0);
		tmp |= POST_CURSOR_2(0x0);
		tmp |= CURSOR_COEFF(0x3f);
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);

		/* same coefficients, per data lane (lanes 0-3) */
		for (lane = 0; lane <= 3; lane++) {
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(lane, port));
			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
				 CURSOR_COEFF_MASK);
			tmp |= POST_CURSOR_1(0x0);
			tmp |= POST_CURSOR_2(0x0);
			tmp |= CURSOR_COEFF(0x3f);
			I915_WRITE(ICL_PORT_TX_DW4_LN(lane, port), tmp);
		}
	}
}
265
/*
 * Configure the DSS splitter for dual-link DSI: front/back mode splits each
 * line between left/right link buffers (plus pixel overlap), otherwise pixels
 * are interleaved across the two links.
 */
static void configure_dual_link_mode(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 dss_ctl1;

	dss_ctl1 = I915_READ(DSS_CTL1);
	dss_ctl1 |= SPLITTER_ENABLE;
	dss_ctl1 &= ~OVERLAP_PIXELS_MASK;
	dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap);

	if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) {
		const struct drm_display_mode *adjusted_mode =
					&pipe_config->base.adjusted_mode;
		u32 dss_ctl2;
		u16 hactive = adjusted_mode->crtc_hdisplay;
		u16 dl_buffer_depth;

		dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE;
		/* each link carries half the line plus the overlap pixels */
		dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap;

		if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH)
			DRM_ERROR("DL buffer depth exceed max value\n");

		/* left buffer depth in CTL1, right buffer depth in CTL2 */
		dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
		dss_ctl2 = I915_READ(DSS_CTL2);
		dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK;
		dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
		I915_WRITE(DSS_CTL2, dss_ctl2);
	} else {
		/* interleave mode */
		dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE;
	}

	I915_WRITE(DSS_CTL1, dss_ctl1);
}
304
/*
 * Program the escape clock dividers so the escape clock stays at or below
 * DSI_MAX_ESC_CLK. The divider is derived from the AFE clock (per-lane
 * bit clock) computed from pixel clock, bpp and lane count.
 */
static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
	u32 afe_clk_khz;
	u32 esc_clk_div_m;

	afe_clk_khz = DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp,
					intel_dsi->lane_count);

	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);

	/* divider for the DSI controller side */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
	}

	/* divider for the DPHY side */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
	}
}
331
/*
 * Grab an IO power domain reference for each enabled DSI port and stash the
 * wakeref; released later in gen11_dsi_disable_io_power().
 */
static void get_dsi_io_power_domains(struct drm_i915_private *dev_priv,
				     struct intel_dsi *intel_dsi)
{
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		/* catch double-get: wakeref must have been released before */
		WARN_ON(intel_dsi->io_wakeref[port]);
		intel_dsi->io_wakeref[port] =
			intel_display_power_get(dev_priv,
						port == PORT_A ?
						POWER_DOMAIN_PORT_DDI_A_IO :
						POWER_DOMAIN_PORT_DDI_B_IO);
	}
}
346
/*
 * Put the combo PHYs into DSI mode and take the IO power domain references
 * for all enabled DSI ports.
 */
static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp |= COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}

	get_dsi_io_power_domains(dev_priv, intel_dsi);
}
362
/* Power up the combo PHY lanes used by each DSI port (lane_count lanes). */
static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports)
		intel_combo_phy_power_up_lanes(dev_priv, port, true,
					       intel_dsi->lane_count, false);
}
373
/*
 * Combo PHY lane programming sequence: loadgen select per lane and FRC
 * latency optimization. Register write order follows the HW sequence.
 */
static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;
	int lane;

	/* step 4a: clear loadgen on AUX, set it on all data lanes but lane 2 */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(port));
		tmp &= ~LOADGEN_SELECT;
		I915_WRITE(ICL_PORT_TX_DW4_AUX(port), tmp);
		for (lane = 0; lane <= 3; lane++) {
			tmp = I915_READ(ICL_PORT_TX_DW4_LN(lane, port));
			tmp &= ~LOADGEN_SELECT;
			if (lane != 2)
				tmp |= LOADGEN_SELECT;
			I915_WRITE(ICL_PORT_TX_DW4_LN(lane, port), tmp);
		}
	}

	/* step 4b: FRC latency optimization on AUX and lane group */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_AUX(port), tmp);
		/* read lane 0, broadcast the value to the group register */
		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(port));
		tmp &= ~FRC_LATENCY_OPTIM_MASK;
		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
		I915_WRITE(ICL_PORT_TX_DW2_GRP(port), tmp);
	}

}
409
/*
 * Voltage swing programming sequence: disable the common keeper, enable SUS
 * clock config, then program swing/de-emphasis with TX training disabled and
 * re-enable training afterwards. Ordering is mandated by the HW sequence.
 */
static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* clear common keeper enable bit (lane group + AUX) */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(port));
		tmp &= ~COMMON_KEEPER_EN;
		I915_WRITE(ICL_PORT_PCS_DW1_AUX(port), tmp);
	}

	/* set SUS clock config */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_CL_DW5(port));
		tmp |= SUS_CLOCK_CONFIG;
		I915_WRITE(ICL_PORT_CL_DW5(port), tmp);
	}

	/* clear TX training enable before reprogramming swing values */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp &= ~TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}

	/* program swing and de-emphasis */
	dsi_program_swing_and_deemphasis(encoder);

	/* set TX training enable again */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_GRP(port), tmp);
		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(port));
		tmp |= TX_TRAINING_EN;
		I915_WRITE(ICL_PORT_TX_DW5_AUX(port), tmp);
	}
}
461
/*
 * Enable the DDI buffer for each DSI port and wait (up to 500 us) for the
 * buffer to leave its idle state.
 */
static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp |= DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		/* timeout means the buffer stayed idle after enabling */
		if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) &
				  DDI_BUF_IS_IDLE),
				  500))
			DRM_ERROR("DDI port:%c buffer idle\n", port_name(port));
	}
}
480
/*
 * Program DPHY timing parameters (T_INIT master, clock lane, data lane and,
 * for low bitrates, TA_SURE). Each DPHY register has a shadow copy on the
 * DSI controller side that is written with the same value.
 */
static void gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	/* program the T_INIT master timer from the VBT-derived init count */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port));
		tmp &= ~MASTER_INIT_TIMER_MASK;
		tmp |= intel_dsi->init_count;
		I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp);
	}

	/* clock lane DPHY timings + shadow register on the DSI side */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);
	}

	/* data lane DPHY timings + shadow register on the DSI side */
	for_each_dsi_port(port, intel_dsi->ports) {
		I915_WRITE(DPHY_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);

		/* shadow register inside display core */
		I915_WRITE(DSI_DATA_TIMING_PARAM(port),
			   intel_dsi->dphy_data_lane_reg);
	}

	/*
	 * For bitrates <= 800 Mbps, override TA_SURE to 0 — presumably a HW
	 * workaround for low link rates; confirm against Bspec.
	 */
	if (intel_dsi_bitrate(intel_dsi) <= 800000) {
		for_each_dsi_port(port, intel_dsi->ports) {
			tmp = I915_READ(DPHY_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp);

			/* shadow register inside display core */
			tmp = I915_READ(DSI_TA_TIMING_PARAM(port));
			tmp &= ~TA_SURE_MASK;
			tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
			I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp);
		}
	}
}
535
536static void gen11_dsi_gate_clocks(struct intel_encoder *encoder)
537{
538 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
539 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
540 u32 tmp;
541 enum port port;
542
543 mutex_lock(&dev_priv->dpll_lock);
544 tmp = I915_READ(DPCLKA_CFGCR0_ICL);
545 for_each_dsi_port(port, intel_dsi->ports) {
546 tmp |= DPCLKA_CFGCR0_DDI_CLK_OFF(port);
547 }
548
549 I915_WRITE(DPCLKA_CFGCR0_ICL, tmp);
550 mutex_unlock(&dev_priv->dpll_lock);
551}
552
553static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder)
554{
555 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
556 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
557 u32 tmp;
558 enum port port;
559
560 mutex_lock(&dev_priv->dpll_lock);
561 tmp = I915_READ(DPCLKA_CFGCR0_ICL);
562 for_each_dsi_port(port, intel_dsi->ports) {
563 tmp &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port);
564 }
565
566 I915_WRITE(DPCLKA_CFGCR0_ICL, tmp);
567 mutex_unlock(&dev_priv->dpll_lock);
568}
569
/*
 * Map the shared DPLL selected in @crtc_state to the DSI ports, then ungate
 * their DDI clocks. Both updates go through DPCLKA_CFGCR0 under dpll_lock.
 */
static void gen11_dsi_map_pll(struct intel_encoder *encoder,
			      const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum port port;
	u32 val;

	mutex_lock(&dev_priv->dpll_lock);

	/* select the PLL for each DSI port */
	val = I915_READ(DPCLKA_CFGCR0_ICL);
	for_each_dsi_port(port, intel_dsi->ports) {
		val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
		val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port);
	}
	I915_WRITE(DPCLKA_CFGCR0_ICL, val);

	/* ungate the clocks in a separate write, after the PLL selection */
	for_each_dsi_port(port, intel_dsi->ports) {
		val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port);
	}
	I915_WRITE(DPCLKA_CFGCR0_ICL, val);

	POSTING_READ(DPCLKA_CFGCR0_ICL);

	mutex_unlock(&dev_priv->dpll_lock);
}
597
/*
 * Configure the DSI transcoder(s): EOTP, link calibration, clock behaviour,
 * pixel buffer threshold, virtual channel, pixel format and operation mode,
 * dual-link sync, DDI function control, and finally wait for link ready.
 */
static void
gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
			       const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->base.crtc);
	enum pipe pipe = intel_crtc->pipe;
	u32 tmp;
	enum port port;
	enum transcoder dsi_trans;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans));

		/* EOTP (end of transmission packet) generation */
		if (intel_dsi->eotp_pkt)
			tmp &= ~EOTP_DISABLED;
		else
			tmp |= EOTP_DISABLED;

		/* enable link calibration only for bitrates >= 1.5 Gbps */
		if (intel_dsi_bitrate(intel_dsi) >= 1500 * 1000) {
			tmp &= ~LINK_CALIBRATION_MASK;
			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
		}

		/* configure continuous clock behaviour */
		tmp &= ~CONTINUOUS_CLK_MASK;
		if (intel_dsi->clock_stop)
			tmp |= CLK_ENTER_LP_AFTER_DATA;
		else
			tmp |= CLK_HS_CONTINUOUS;

		/* configure buffer threshold limit to 1/4 of the buffer */
		tmp &= ~PIX_BUF_THRESHOLD_MASK;
		tmp |= PIX_BUF_THRESHOLD_1_4;

		/* set virtual channel to '0' */
		tmp &= ~PIX_VIRT_CHAN_MASK;
		tmp |= PIX_VIRT_CHAN(0);

		/* program BGR transmission if requested by the panel */
		if (intel_dsi->bgr_enabled)
			tmp |= BGR_TRANSMISSION;

		/* select pixel format */
		tmp &= ~PIX_FMT_MASK;
		switch (intel_dsi->pixel_format) {
		default:
			MISSING_CASE(intel_dsi->pixel_format);
			/* fall through */
		case MIPI_DSI_FMT_RGB565:
			tmp |= PIX_FMT_RGB565;
			break;
		case MIPI_DSI_FMT_RGB666_PACKED:
			tmp |= PIX_FMT_RGB666_PACKED;
			break;
		case MIPI_DSI_FMT_RGB666:
			tmp |= PIX_FMT_RGB666_LOOSE;
			break;
		case MIPI_DSI_FMT_RGB888:
			tmp |= PIX_FMT_RGB888;
			break;
		}

		/* program DSI operation mode (video mode only) */
		if (is_vid_mode(intel_dsi)) {
			tmp &= ~OP_MODE_MASK;
			switch (intel_dsi->video_mode_format) {
			default:
				MISSING_CASE(intel_dsi->video_mode_format);
				/* fall through */
			case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS:
				tmp |= VIDEO_MODE_SYNC_EVENT;
				break;
			case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE:
				tmp |= VIDEO_MODE_SYNC_PULSE;
				break;
			}
		}

		I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
	}

	/* enable port sync mode and the DSS splitter for dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp |= PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}

		/* configure stream splitting */
		configure_dual_link_mode(encoder, pipe_config);
	}

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* select data lane width */
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~DDI_PORT_WIDTH_MASK;
		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);

		/* select input pipe */
		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
		switch (pipe) {
		default:
			MISSING_CASE(pipe);
			/* fall through */
		case PIPE_A:
			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		}

		/* enable DDI buffer */
		tmp |= TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* wait for link ready (up to 2500 us) */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) &
				LINK_READY), 2500))
			DRM_ERROR("DSI link not ready\n");
	}
}
734
/*
 * Program the transcoder timing registers (HTOTAL/HSYNC/VTOTAL/VSYNC/
 * VSYNCSHIFT) from the adjusted mode. For dual link the horizontal values
 * are halved per link (plus the pixel overlap in front/back mode).
 */
static void
gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	const struct drm_display_mode *adjusted_mode =
					&pipe_config->base.adjusted_mode;
	enum port port;
	enum transcoder dsi_trans;
	/* horizontal timings */
	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
	u16 hfront_porch, hback_porch;
	/* vertical timings */
	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;

	hactive = adjusted_mode->crtc_hdisplay;
	htotal = adjusted_mode->crtc_htotal;
	hsync_start = adjusted_mode->crtc_hsync_start;
	hsync_end = adjusted_mode->crtc_hsync_end;
	hsync_size  = hsync_end - hsync_start;
	hfront_porch = (adjusted_mode->crtc_hsync_start -
			adjusted_mode->crtc_hdisplay);
	hback_porch = (adjusted_mode->crtc_htotal -
		       adjusted_mode->crtc_hsync_end);
	vactive = adjusted_mode->crtc_vdisplay;
	vtotal = adjusted_mode->crtc_vtotal;
	vsync_start = adjusted_mode->crtc_vsync_start;
	vsync_end = adjusted_mode->crtc_vsync_end;
	vsync_shift = hsync_start - htotal / 2;

	if (intel_dsi->dual_link) {
		/* each link transfers half a line */
		hactive /= 2;
		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
			hactive += intel_dsi->pixel_overlap;
		htotal /= 2;
	}

	/* minimum hactive as per bspec: 256 pixels */
	if (adjusted_mode->crtc_hdisplay < 256)
		DRM_ERROR("hactive is less then 256 pixels\n");

	/* if RGB666 format, then hactive must be multiple of 4 pixels */
	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
		DRM_ERROR("hactive pixels are not multiple of 4\n");

	/* program TRANS_HTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(HTOTAL(dsi_trans),
			   (hactive - 1) | ((htotal - 1) << 16));
	}

	/* TRANS_HSYNC register to be programmed only for video mode */
	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
		if (intel_dsi->video_mode_format ==
		    VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) {
			/* BSPEC: hsync size should be at least 16 pixels */
			if (hsync_size < 16)
				DRM_ERROR("hsync size < 16 pixels\n");
		}

		if (hback_porch < 16)
			DRM_ERROR("hback porch < 16 pixels\n");

		if (intel_dsi->dual_link) {
			hsync_start /= 2;
			hsync_end /= 2;
		}

		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			I915_WRITE(HSYNC(dsi_trans),
				   (hsync_start - 1) | ((hsync_end - 1) << 16));
		}
	}

	/* program TRANS_VTOTAL register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/* vertical values are in terms of lines, no dual-link split */
		I915_WRITE(VTOTAL(dsi_trans),
			   (vactive - 1) | ((vtotal - 1) << 16));
	}

	if (vsync_end < vsync_start || vsync_end > vtotal)
		DRM_ERROR("Invalid vsync_end value\n");

	if (vsync_start < vactive)
		DRM_ERROR("vsync_start less than vactive\n");

	/* program TRANS_VSYNC register */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNC(dsi_trans),
			   (vsync_start - 1) | ((vsync_end - 1) << 16));
	}

	/*
	 * program TRANS_VSYNCSHIFT register — presumably only meaningful for
	 * interlaced/dual-link sync; written unconditionally here.
	 */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift);
	}
}
849
/*
 * Enable the DSI transcoder(s) via PIPECONF and wait (up to 10 ms) for the
 * active state to be reported.
 */
static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(PIPECONF(dsi_trans));
		tmp |= PIPECONF_ENABLE;
		I915_WRITE(PIPECONF(dsi_trans), tmp);

		/* wait for transcoder to be enabled */
		if (intel_wait_for_register(&dev_priv->uncore,
					    PIPECONF(dsi_trans),
					    I965_PIPECONF_ACTIVE,
					    I965_PIPECONF_ACTIVE, 10))
			DRM_ERROR("DSI transcoder not enabled\n");
	}
}
872
/*
 * Program the HS TX, LP RX and turn-around timeout registers.
 *
 * The VBT timeout values are converted to the units the registers expect;
 * the mul/divisor factors look like an escape-clock to byte-clock
 * conversion derived from TLPX and the link bitrate — confirm against
 * Bspec before changing.
 */
static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;

	divisor = intel_dsi_tlpx_ns(intel_dsi) * intel_dsi_bitrate(intel_dsi) * 1000;
	mul = 8 * 1000000;
	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
				     divisor);
	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);

		/* program HS TX timeout */
		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);

		/* program LP RX host timeout */
		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);

		/* program turn-around timeout */
		tmp = I915_READ(DSI_TA_TO(dsi_trans));
		tmp &= ~TA_TIMEOUT_VALUE_MASK;
		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
	}
}
921
/*
 * Full port/PHY bring-up sequence. The call order is the HW-mandated
 * enable sequence — do not reorder.
 */
static void
gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
			      const struct intel_crtc_state *pipe_config)
{
	/* step 4a: power up all lanes of the DDI */
	gen11_dsi_power_up_lanes(encoder);

	/* step 4b: configure lane sequencing of the combo PHY */
	gen11_dsi_config_phy_lanes_sequence(encoder);

	/* step 4c: configure voltage swing and skew */
	gen11_dsi_voltage_swing_program_seq(encoder);

	/* enable DDI buffer */
	gen11_dsi_enable_ddi_buffer(encoder);

	/* setup D-PHY timings */
	gen11_dsi_setup_dphy_timings(encoder);

	/* step 4h: setup DSI protocol timeouts */
	gen11_dsi_setup_timeouts(encoder);

	/* step 4i: configure the DSI transcoders */
	gen11_dsi_configure_transcoder(encoder, pipe_config);

	/* step 4j: gate the DDI clocks again */
	gen11_dsi_gate_clocks(encoder);
}
950
951static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
952{
953 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
954 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
955 struct mipi_dsi_device *dsi;
956 enum port port;
957 enum transcoder dsi_trans;
958 u32 tmp;
959 int ret;
960
961
962 for_each_dsi_port(port, intel_dsi->ports) {
963 dsi_trans = dsi_port_to_transcoder(port);
964
965
966
967
968
969 tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans));
970 tmp &= NUMBER_RX_PLOAD_DW_MASK;
971
972 tmp = tmp * 4;
973 dsi = intel_dsi->dsi_hosts[port]->device;
974 ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
975 if (ret < 0)
976 DRM_ERROR("error setting max return pkt size%d\n", tmp);
977 }
978
979
980 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
981 intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
982 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
983 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
984 intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);
985
986
987 wait_for_cmds_dispatched_to_panel(encoder);
988}
989
/* Pre-PLL enable hook: power up DSI IO and program the escape clock dividers. */
static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
				     const struct intel_crtc_state *pipe_config,
				     const struct drm_connector_state *conn_state)
{
	/* step 2: enable IO power */
	gen11_dsi_enable_io_power(encoder);

	/* step 3: enable DSI PLL (dividers only; PLL itself enabled later) */
	gen11_dsi_program_esc_clk_div(encoder);
}
1000
/*
 * Pre-enable hook: map PLL, bring up port/PHY, power up the panel, program
 * timings, enable the transcoder, then turn on the backlight. The order is
 * the HW-mandated enable sequence.
 */
static void gen11_dsi_pre_enable(struct intel_encoder *encoder,
				 const struct intel_crtc_state *pipe_config,
				 const struct drm_connector_state *conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step 3b: map the PLL to the DSI ports */
	gen11_dsi_map_pll(encoder, pipe_config);

	/* step 4: enable DSI port and DPHY */
	gen11_dsi_enable_port_and_phy(encoder, pipe_config);

	/* step 5: program and power up the panel */
	gen11_dsi_powerup_panel(encoder);

	/* step 6c: configure transcoder timings */
	gen11_dsi_set_transcoder_timings(encoder, pipe_config);

	/* step 6d: enable DSI transcoder */
	gen11_dsi_enable_transcoder(encoder);

	/* step 7: enable backlight */
	intel_panel_enable_backlight(pipe_config, conn_state);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
}
1026
1027static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
1028{
1029 struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1030 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
1031 enum port port;
1032 enum transcoder dsi_trans;
1033 u32 tmp;
1034
1035 for_each_dsi_port(port, intel_dsi->ports) {
1036 dsi_trans = dsi_port_to_transcoder(port);
1037
1038
1039 tmp = I915_READ(PIPECONF(dsi_trans));
1040 tmp &= ~PIPECONF_ENABLE;
1041 I915_WRITE(PIPECONF(dsi_trans), tmp);
1042
1043
1044 if (intel_wait_for_register(&dev_priv->uncore,
1045 PIPECONF(dsi_trans),
1046 I965_PIPECONF_ACTIVE, 0, 50))
1047 DRM_ERROR("DSI trancoder not disabled\n");
1048 }
1049}
1050
/*
 * Run the VBT display-off / reset / power-off sequences and wait for the
 * commands to actually reach the panel.
 */
static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);

	/* ensure cmds dispatched to panel */
	wait_for_cmds_dispatched_to_panel(encoder);
}
1062
/*
 * Tear down the DSI transcoder configuration: put the links into ULPS,
 * disable the DDI function and, for dual link, port sync mode.
 * (Function name keeps the historical "trancoder" spelling — callers
 * depend on it.)
 */
static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	enum transcoder dsi_trans;
	u32 tmp;

	/* put DSI link in ULPS */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
		tmp |= LINK_ENTER_ULPS;
		tmp &= ~LINK_ULPS_TYPE_LP11;
		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);

		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
				LINK_IN_ULPS),
				10))
			DRM_ERROR("DSI link not in ULPS\n");
	}

	/* disable ddi function */
	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		tmp &= ~TRANS_DDI_FUNC_ENABLE;
		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
	}

	/* disable port sync mode if dual link */
	if (intel_dsi->dual_link) {
		for_each_dsi_port(port, intel_dsi->ports) {
			dsi_trans = dsi_port_to_transcoder(port);
			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
			tmp &= ~PORT_SYNC_MODE_ENABLE;
			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
		}
	}
}
1103
/*
 * Disable the DDI buffers of the DSI ports. The clocks are ungated for the
 * register access and re-gated afterwards.
 */
static void gen11_dsi_disable_port(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	u32 tmp;
	enum port port;

	gen11_dsi_ungate_clocks(encoder);
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(DDI_BUF_CTL(port));
		tmp &= ~DDI_BUF_CTL_ENABLE;
		I915_WRITE(DDI_BUF_CTL(port), tmp);

		/* wait (up to 8 us) for the buffer to report idle */
		if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) &
				 DDI_BUF_IS_IDLE),
				 8))
			DRM_ERROR("DDI port:%c buffer not idle\n",
				  port_name(port));
	}
	gen11_dsi_gate_clocks(encoder);
}
1125
/*
 * Release the per-port IO power domain wakerefs taken in
 * get_dsi_io_power_domains() and take the combo PHYs out of DSI mode.
 */
static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum port port;
	u32 tmp;

	for_each_dsi_port(port, intel_dsi->ports) {
		intel_wakeref_t wakeref;

		/* fetch_and_zero guards against a double put */
		wakeref = fetch_and_zero(&intel_dsi->io_wakeref[port]);
		intel_display_power_put(dev_priv,
					port == PORT_A ?
					POWER_DOMAIN_PORT_DDI_A_IO :
					POWER_DOMAIN_PORT_DDI_B_IO,
					wakeref);
	}

	/* set mode to DDI */
	for_each_dsi_port(port, intel_dsi->ports) {
		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
		tmp &= ~COMBO_PHY_MODE_DSI;
		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
	}
}
1151
/*
 * Disable hook: reverse of the enable sequence — backlight off, transcoder
 * off, panel power down, transcoder deconfig, port off, IO power off.
 */
static void gen11_dsi_disable(struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* step1: turn off backlight */
	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
	intel_panel_disable_backlight(old_conn_state);

	/* step2d,e: disable transcoder and wait */
	gen11_dsi_disable_transcoder(encoder);

	/* step2f,g: powerdown panel */
	gen11_dsi_powerdown_panel(encoder);

	/* step2h,i,j: deconfig trancoder */
	gen11_dsi_deconfigure_trancoder(encoder);

	/* step3: disable port */
	gen11_dsi_disable_port(encoder);

	/* step4: disable IO power */
	gen11_dsi_disable_io_power(encoder);
}
1177
1178static void gen11_dsi_get_timings(struct intel_encoder *encoder,
1179 struct intel_crtc_state *pipe_config)
1180{
1181 struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
1182 struct drm_display_mode *adjusted_mode =
1183 &pipe_config->base.adjusted_mode;
1184
1185 if (intel_dsi->dual_link) {
1186 adjusted_mode->crtc_hdisplay *= 2;
1187 if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
1188 adjusted_mode->crtc_hdisplay -=
1189 intel_dsi->pixel_overlap;
1190 adjusted_mode->crtc_htotal *= 2;
1191 }
1192 adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hdisplay;
1193 adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_htotal;
1194
1195 if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
1196 if (intel_dsi->dual_link) {
1197 adjusted_mode->crtc_hsync_start *= 2;
1198 adjusted_mode->crtc_hsync_end *= 2;
1199 }
1200 }
1201 adjusted_mode->crtc_vblank_start = adjusted_mode->crtc_vdisplay;
1202 adjusted_mode->crtc_vblank_end = adjusted_mode->crtc_vtotal;
1203}
1204
/* Read the current hardware configuration back into @pipe_config. */
static void gen11_dsi_get_config(struct intel_encoder *encoder,
				 struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);

	/* Derive the port clock from the PLL hw state. */
	pipe_config->port_clock =
		cnl_calc_wrpll_link(dev_priv, &pipe_config->dpll_hw_state);

	/* Dual link drives two halves, so the total pixel clock doubles. */
	pipe_config->base.adjusted_mode.crtc_clock = intel_dsi->pclk;
	if (intel_dsi->dual_link)
		pipe_config->base.adjusted_mode.crtc_clock *= 2;

	gen11_dsi_get_timings(encoder, pipe_config);
	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
	pipe_config->pipe_bpp = bdw_get_pipemisc_bpp(crtc);
}
1224
1225static int gen11_dsi_compute_config(struct intel_encoder *encoder,
1226 struct intel_crtc_state *pipe_config,
1227 struct drm_connector_state *conn_state)
1228{
1229 struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
1230 base);
1231 struct intel_connector *intel_connector = intel_dsi->attached_connector;
1232 struct intel_crtc *crtc = to_intel_crtc(pipe_config->base.crtc);
1233 const struct drm_display_mode *fixed_mode =
1234 intel_connector->panel.fixed_mode;
1235 struct drm_display_mode *adjusted_mode =
1236 &pipe_config->base.adjusted_mode;
1237
1238 pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
1239 intel_fixed_panel_mode(fixed_mode, adjusted_mode);
1240 intel_pch_panel_fitting(crtc, pipe_config, conn_state->scaling_mode);
1241
1242 adjusted_mode->flags = 0;
1243
1244
1245 if (intel_dsi->ports == BIT(PORT_B))
1246 pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
1247 else
1248 pipe_config->cpu_transcoder = TRANSCODER_DSI_0;
1249
1250 pipe_config->clock_set = true;
1251 pipe_config->port_clock = intel_dsi_bitrate(intel_dsi) / 5;
1252
1253 return 0;
1254}
1255
/* Claim the DSI IO power domains needed for this encoder's ports. */
static void gen11_dsi_get_power_domains(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	get_dsi_io_power_domains(to_i915(encoder->base.dev),
				 enc_to_intel_dsi(&encoder->base));
}
1262
/*
 * Read back whether the encoder is currently enabled and, if so,
 * which pipe drives it. Returns false (without touching *pipe) when
 * the power domain is off or the transcoder input is invalid.
 */
static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
				   enum pipe *pipe)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dsi *intel_dsi = enc_to_intel_dsi(&encoder->base);
	enum transcoder dsi_trans;
	intel_wakeref_t wakeref;
	enum port port;
	bool ret = false;
	u32 tmp;

	/* Only touch the registers if the domain is already powered. */
	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	for_each_dsi_port(port, intel_dsi->ports) {
		dsi_trans = dsi_port_to_transcoder(port);
		/* Decode which pipe feeds this DSI transcoder. */
		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		case TRANS_DDI_EDP_INPUT_A_ON:
			*pipe = PIPE_A;
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe = PIPE_B;
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe = PIPE_C;
			break;
		default:
			DRM_ERROR("Invalid PIPE input\n");
			goto out;
		}

		/* Enabled only if the pipe itself is running. */
		tmp = I915_READ(PIPECONF(dsi_trans));
		ret = tmp & PIPECONF_ENABLE;
	}
out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
	return ret;
}
1304
/* drm_encoder_funcs.destroy hook: forward to the common destructor. */
static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
{
	intel_encoder_destroy(encoder);
}
1309
static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
	.destroy = gen11_dsi_encoder_destroy,
};
1313
/* Connector ops: standard intel/atomic helpers, no DSI specifics. */
static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
	.late_register = intel_connector_register,
	.early_unregister = intel_connector_unregister,
	.destroy = intel_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.atomic_get_property = intel_digital_connector_atomic_get_property,
	.atomic_set_property = intel_digital_connector_atomic_set_property,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
};
1324
static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
	.get_modes = intel_dsi_get_modes,
	.mode_valid = intel_dsi_mode_valid,
	.atomic_check = intel_digital_connector_atomic_check,
};
1330
/* mipi_dsi_host_ops.attach: nothing to do on this platform. */
static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1336
/* mipi_dsi_host_ops.detach: nothing to do on this platform. */
static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
				 struct mipi_dsi_device *dsi)
{
	return 0;
}
1342
/*
 * mipi_dsi_host_ops.transfer: build a DSI packet from @msg and push
 * header (and payload for long packets) through the command FIFOs.
 * Returns the number of bytes queued, or a negative error code.
 */
static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
				       const struct mipi_dsi_msg *msg)
{
	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
	struct mipi_dsi_packet dsi_pkt;
	ssize_t ret;
	bool enable_lpdt = false;

	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
	if (ret < 0)
		return ret;

	/* Honor the caller's request for low-power transmission. */
	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
		enable_lpdt = true;

	/* send packet header */
	ret = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt);
	if (ret < 0)
		return ret;

	/* only long packets carry a payload */
	if (mipi_dsi_packet_format_is_long(msg->type)) {
		ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt);
		if (ret < 0)
			return ret;
	}

	/* report header + payload as the transferred byte count */
	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;

	return ret;
}
1376
static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
	.attach = gen11_dsi_host_attach,
	.detach = gen11_dsi_host_detach,
	.transfer = gen11_dsi_host_transfer,
};
1382
/*
 * Upper bounds for the DPHY timing counts; icl_dphy_param_init()
 * clamps each computed value to these before programming.
 */
#define ICL_PREPARE_CNT_MAX	0x7
#define ICL_CLK_ZERO_CNT_MAX	0xf
#define ICL_TRAIL_CNT_MAX	0x7
#define ICL_TCLK_PRE_CNT_MAX	0x3
#define ICL_TCLK_POST_CNT_MAX	0x7
#define ICL_HS_ZERO_CNT_MAX	0xf
#define ICL_EXIT_ZERO_CNT_MAX	0x7
1390
/*
 * Compute the DPHY timing override values from the VBT mipi_config
 * timings (in ns) and the escape clock period tlpx, clamp each count
 * to its ICL_*_MAX limit, and cache the resulting clock-lane and
 * data-lane register values on @intel_dsi.
 */
static void icl_dphy_param_init(struct intel_dsi *intel_dsi)
{
	struct drm_device *dev = intel_dsi->base.base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct mipi_config *mipi_config = dev_priv->vbt.dsi.config;
	u32 tlpx_ns;
	u32 prepare_cnt, exit_zero_cnt, clk_zero_cnt, trail_cnt;
	u32 ths_prepare_ns, tclk_trail_ns;
	u32 hs_zero_cnt;
	u32 tclk_pre_cnt, tclk_post_cnt;

	tlpx_ns = intel_dsi_tlpx_ns(intel_dsi);

	/* Use the larger of the clock-lane / data-lane VBT timings. */
	tclk_trail_ns = max(mipi_config->tclk_trail, mipi_config->ths_trail);
	ths_prepare_ns = max(mipi_config->ths_prepare,
			     mipi_config->tclk_prepare);

	/*
	 * prepare count in escape clocks. The *4 presumably accounts
	 * for a register field with two fractional bits (0.25 tlpx
	 * granularity) -- NOTE(review): confirm against bspec.
	 */
	prepare_cnt = DIV_ROUND_UP(ths_prepare_ns * 4, tlpx_ns);
	if (prepare_cnt > ICL_PREPARE_CNT_MAX) {
		DRM_DEBUG_KMS("prepare_cnt out of range (%d)\n", prepare_cnt);
		prepare_cnt = ICL_PREPARE_CNT_MAX;
	}

	/* clk zero count in escape clocks */
	clk_zero_cnt = DIV_ROUND_UP(mipi_config->tclk_prepare_clkzero -
				    ths_prepare_ns, tlpx_ns);
	if (clk_zero_cnt > ICL_CLK_ZERO_CNT_MAX) {
		DRM_DEBUG_KMS("clk_zero_cnt out of range (%d)\n", clk_zero_cnt);
		clk_zero_cnt = ICL_CLK_ZERO_CNT_MAX;
	}

	/* trail count in escape clocks */
	trail_cnt = DIV_ROUND_UP(tclk_trail_ns, tlpx_ns);
	if (trail_cnt > ICL_TRAIL_CNT_MAX) {
		DRM_DEBUG_KMS("trail_cnt out of range (%d)\n", trail_cnt);
		trail_cnt = ICL_TRAIL_CNT_MAX;
	}

	/* tclk pre count in escape clocks */
	tclk_pre_cnt = DIV_ROUND_UP(mipi_config->tclk_pre, tlpx_ns);
	if (tclk_pre_cnt > ICL_TCLK_PRE_CNT_MAX) {
		DRM_DEBUG_KMS("tclk_pre_cnt out of range (%d)\n", tclk_pre_cnt);
		tclk_pre_cnt = ICL_TCLK_PRE_CNT_MAX;
	}

	/* tclk post count in escape clocks */
	tclk_post_cnt = DIV_ROUND_UP(mipi_config->tclk_post, tlpx_ns);
	if (tclk_post_cnt > ICL_TCLK_POST_CNT_MAX) {
		DRM_DEBUG_KMS("tclk_post_cnt out of range (%d)\n", tclk_post_cnt);
		tclk_post_cnt = ICL_TCLK_POST_CNT_MAX;
	}

	/* hs zero count in escape clocks */
	hs_zero_cnt = DIV_ROUND_UP(mipi_config->ths_prepare_hszero -
				   ths_prepare_ns, tlpx_ns);
	if (hs_zero_cnt > ICL_HS_ZERO_CNT_MAX) {
		DRM_DEBUG_KMS("hs_zero_cnt out of range (%d)\n", hs_zero_cnt);
		hs_zero_cnt = ICL_HS_ZERO_CNT_MAX;
	}

	/* hs exit zero count in escape clocks */
	exit_zero_cnt = DIV_ROUND_UP(mipi_config->ths_exit, tlpx_ns);
	if (exit_zero_cnt > ICL_EXIT_ZERO_CNT_MAX) {
		DRM_DEBUG_KMS("exit_zero_cnt out of range (%d)\n", exit_zero_cnt);
		exit_zero_cnt = ICL_EXIT_ZERO_CNT_MAX;
	}

	/* clock lane dphy timings */
	intel_dsi->dphy_reg = (CLK_PREPARE_OVERRIDE |
			       CLK_PREPARE(prepare_cnt) |
			       CLK_ZERO_OVERRIDE |
			       CLK_ZERO(clk_zero_cnt) |
			       CLK_PRE_OVERRIDE |
			       CLK_PRE(tclk_pre_cnt) |
			       CLK_POST_OVERRIDE |
			       CLK_POST(tclk_post_cnt) |
			       CLK_TRAIL_OVERRIDE |
			       CLK_TRAIL(trail_cnt));

	/* data lanes dphy timings */
	intel_dsi->dphy_data_lane_reg = (HS_PREPARE_OVERRIDE |
					 HS_PREPARE(prepare_cnt) |
					 HS_ZERO_OVERRIDE |
					 HS_ZERO(hs_zero_cnt) |
					 HS_TRAIL_OVERRIDE |
					 HS_TRAIL(trail_cnt) |
					 HS_EXIT_OVERRIDE |
					 HS_EXIT(exit_zero_cnt));

	intel_dsi_log_params(intel_dsi);
}
1489
/*
 * Probe and register the gen11 DSI encoder/connector pair, if the
 * VBT says a DSI panel is present.
 */
void icl_dsi_init(struct drm_i915_private *dev_priv)
{
	struct drm_device *dev = &dev_priv->drm;
	struct intel_dsi *intel_dsi;
	struct intel_encoder *encoder;
	struct intel_connector *intel_connector;
	struct drm_connector *connector;
	struct drm_display_mode *fixed_mode;
	enum port port;

	if (!intel_bios_is_dsi_present(dev_priv, &port))
		return;

	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
	if (!intel_dsi)
		return;

	intel_connector = intel_connector_alloc();
	if (!intel_connector) {
		kfree(intel_dsi);
		return;
	}

	encoder = &intel_dsi->base;
	intel_dsi->attached_connector = intel_connector;
	connector = &intel_connector->base;

	/* register DSI encoder with DRM subsystem */
	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));

	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
	encoder->pre_enable = gen11_dsi_pre_enable;
	encoder->disable = gen11_dsi_disable;
	encoder->port = port;
	encoder->get_config = gen11_dsi_get_config;
	encoder->update_pipe = intel_panel_update_backlight;
	encoder->compute_config = gen11_dsi_compute_config;
	encoder->get_hw_state = gen11_dsi_get_hw_state;
	encoder->type = INTEL_OUTPUT_DSI;
	encoder->cloneable = 0;
	encoder->crtc_mask = BIT(PIPE_A) | BIT(PIPE_B) | BIT(PIPE_C);
	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
	encoder->get_power_domains = gen11_dsi_get_power_domains;

	/* register DSI connector with DRM subsystem */
	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
			   DRM_MODE_CONNECTOR_DSI);
	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
	connector->interlace_allowed = false;
	connector->doublescan_allowed = false;
	intel_connector->get_hw_state = intel_connector_get_hw_state;

	/* attach connector to encoder */
	intel_connector_attach_encoder(intel_connector, encoder);

	mutex_lock(&dev->mode_config.mutex);
	fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
	mutex_unlock(&dev->mode_config.mutex);

	if (!fixed_mode) {
		DRM_ERROR("DSI fixed mode info missing\n");
		goto err;
	}

	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
	intel_panel_setup_backlight(connector, INVALID_PIPE);

	/* Dual-link panels drive both port A and port B. */
	if (dev_priv->vbt.dsi.config->dual_link)
		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
	else
		intel_dsi->ports = BIT(port);

	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;

	for_each_dsi_port(port, intel_dsi->ports) {
		struct intel_dsi_host *host;

		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
		if (!host)
			goto err;

		intel_dsi->dsi_hosts[port] = host;
	}

	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
		DRM_DEBUG_KMS("no device found\n");
		goto err;
	}

	icl_dphy_param_init(intel_dsi);
	return;

err:
	/*
	 * NOTE(review): this path frees the allocations but does not
	 * drm_connector_cleanup() the initialized connector nor free
	 * any intel_dsi_host instances created above -- verify whether
	 * those are reclaimed elsewhere or leak on this path.
	 */
	drm_encoder_cleanup(&encoder->base);
	kfree(intel_dsi);
	kfree(intel_connector);
}
1590