/*
 * Samsung SoC DP (Display Port) interface driver.
 *
 * Author: Jingoo Han <jg1.han@samsung.com>
 */

#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/of.h>
#include <linux/of_gpio.h>
#include <linux/of_graph.h>
#include <linux/gpio.h>
#include <linux/component.h>
#include <linux/phy/phy.h>
#include <linux/pm_runtime.h>
#include <video/of_display_timing.h>
#include <video/of_videomode.h>

#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_atomic_helper.h>
#include <drm/drm_panel.h>

#include "exynos_dp_core.h"
#include "exynos_drm_crtc.h"

#define ctx_from_connector(c)	container_of(c, struct exynos_dp_device, \
					connector)

static inline struct exynos_drm_crtc *dp_to_crtc(struct exynos_dp_device *dp)
{
	return to_exynos_crtc(dp->encoder.crtc);
}

static inline struct exynos_dp_device *encoder_to_dp(
						struct drm_encoder *e)
{
	return container_of(e, struct exynos_dp_device, encoder);
}

struct bridge_init {
	struct i2c_client *client;
	struct device_node *node;
};

static void exynos_dp_init_dp(struct exynos_dp_device *dp)
{
	exynos_dp_reset(dp);

	exynos_dp_swreset(dp);

	exynos_dp_init_analog_param(dp);
	exynos_dp_init_interrupt(dp);

	/* SW defined function Normal operation */
	exynos_dp_enable_sw_function(dp);

	exynos_dp_config_interrupt(dp);
	exynos_dp_init_analog_func(dp);

	exynos_dp_init_hpd(dp);
	exynos_dp_init_aux(dp);
}

static int exynos_dp_detect_hpd(struct exynos_dp_device *dp)
{
	int timeout_loop = 0;

	while (exynos_dp_get_plug_in_status(dp) != 0) {
		timeout_loop++;
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "failed to get hpd plug status\n");
			return -ETIMEDOUT;
		}
		usleep_range(10, 11);
	}

	return 0;
}

static unsigned char exynos_dp_calc_edid_check_sum(unsigned char *edid_data)
{
	int i;
	unsigned char sum = 0;

	for (i = 0; i < EDID_BLOCK_LENGTH; i++)
		sum = sum + edid_data[i];

	return sum;
}

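/*
 * Read the sink EDID over the AUX channel (I2C-over-AUX). Handles sinks
 * with either a single 128-byte block or one extension block, verifies the
 * block checksums and, when the sink has requested an EDID-read test
 * (DP_TEST_LINK_EDID_READ), writes the checksum and test response back to
 * the DPCD.
 */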
static int exynos_dp_read_edid(struct exynos_dp_device *dp)
{
	unsigned char edid[EDID_BLOCK_LENGTH * 2];
	unsigned int extend_block = 0;
	unsigned char sum;
	unsigned char test_vector;
	int retval;

	/*
	 * EDID device address is 0x50.
	 * However, if necessary, you must have set upper address
	 * into E-EDID in I2C device, 0x30.
	 */

	/* Read Extension Flag, Number of 128-byte EDID extension blocks */
	retval = exynos_dp_read_byte_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
						EDID_EXTENSION_FLAG,
						&extend_block);
	if (retval)
		return retval;

	if (extend_block > 0) {
		dev_dbg(dp->dev, "EDID data includes a single extension!\n");

		/* Read EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp, I2C_EDID_DEVICE_ADDR,
						EDID_HEADER_PATTERN,
						EDID_BLOCK_LENGTH,
						&edid[EDID_HEADER_PATTERN]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(edid);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		/* Read additional EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp,
						I2C_EDID_DEVICE_ADDR,
						EDID_BLOCK_LENGTH,
						EDID_BLOCK_LENGTH,
						&edid[EDID_BLOCK_LENGTH]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(&edid[EDID_BLOCK_LENGTH]);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		exynos_dp_read_byte_from_dpcd(dp, DP_TEST_REQUEST,
						&test_vector);
		if (test_vector & DP_TEST_LINK_EDID_READ) {
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_EDID_CHECKSUM,
				edid[EDID_BLOCK_LENGTH + EDID_CHECKSUM]);
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_RESPONSE,
				DP_TEST_EDID_CHECKSUM_WRITE);
		}
	} else {
		dev_info(dp->dev, "EDID data does not include any extensions.\n");

		/* Read EDID data */
		retval = exynos_dp_read_bytes_from_i2c(dp,
						I2C_EDID_DEVICE_ADDR,
						EDID_HEADER_PATTERN,
						EDID_BLOCK_LENGTH,
						&edid[EDID_HEADER_PATTERN]);
		if (retval != 0) {
			dev_err(dp->dev, "EDID Read failed!\n");
			return -EIO;
		}
		sum = exynos_dp_calc_edid_check_sum(edid);
		if (sum != 0) {
			dev_err(dp->dev, "EDID bad checksum!\n");
			return -EIO;
		}

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TEST_REQUEST,
			&test_vector);
		if (test_vector & DP_TEST_LINK_EDID_READ) {
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_EDID_CHECKSUM,
				edid[EDID_CHECKSUM]);
			exynos_dp_write_byte_to_dpcd(dp,
				DP_TEST_RESPONSE,
				DP_TEST_EDID_CHECKSUM_WRITE);
		}
	}

	dev_dbg(dp->dev, "EDID Read success!\n");
	return 0;
}

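/*
 * Cache the first twelve DPCD receiver capability bytes, then try to read
 * the EDID, retrying up to three times before giving up.
 */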
static int exynos_dp_handle_edid(struct exynos_dp_device *dp)
{
	u8 buf[12];
	int i;
	int retval;

	/* Read DPCD DP_DPCD_REV~RECEIVE_PORT1_CAP_1 */
	retval = exynos_dp_read_bytes_from_dpcd(dp, DP_DPCD_REV,
				12, buf);
	if (retval)
		return retval;

	/* Read EDID */
	for (i = 0; i < 3; i++) {
		retval = exynos_dp_read_edid(dp);
		if (!retval)
			break;
	}

	return retval;
}

static void exynos_dp_enable_rx_to_enhanced_mode(struct exynos_dp_device *dp,
						bool enable)
{
	u8 data;

	exynos_dp_read_byte_from_dpcd(dp, DP_LANE_COUNT_SET, &data);

	if (enable)
		exynos_dp_write_byte_to_dpcd(dp, DP_LANE_COUNT_SET,
			DP_LANE_COUNT_ENHANCED_FRAME_EN |
			DPCD_LANE_COUNT_SET(data));
	else
		exynos_dp_write_byte_to_dpcd(dp, DP_LANE_COUNT_SET,
			DPCD_LANE_COUNT_SET(data));
}

static int exynos_dp_is_enhanced_mode_available(struct exynos_dp_device *dp)
{
	u8 data;
	int retval;

	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LANE_COUNT, &data);
	retval = DPCD_ENHANCED_FRAME_CAP(data);

	return retval;
}

static void exynos_dp_set_enhanced_mode(struct exynos_dp_device *dp)
{
	u8 data;

	data = exynos_dp_is_enhanced_mode_available(dp);
	exynos_dp_enable_rx_to_enhanced_mode(dp, data);
	exynos_dp_enable_enhanced_mode(dp, data);
}

static void exynos_dp_training_pattern_dis(struct exynos_dp_device *dp)
{
	exynos_dp_set_training_pattern(dp, DP_NONE);

	exynos_dp_write_byte_to_dpcd(dp,
		DP_TRAINING_PATTERN_SET,
		DP_TRAINING_PATTERN_DISABLE);
}

static void exynos_dp_set_lane_lane_pre_emphasis(struct exynos_dp_device *dp,
					int pre_emphasis, int lane)
{
	switch (lane) {
	case 0:
		exynos_dp_set_lane0_pre_emphasis(dp, pre_emphasis);
		break;
	case 1:
		exynos_dp_set_lane1_pre_emphasis(dp, pre_emphasis);
		break;

	case 2:
		exynos_dp_set_lane2_pre_emphasis(dp, pre_emphasis);
		break;

	case 3:
		exynos_dp_set_lane3_pre_emphasis(dp, pre_emphasis);
		break;
	}
}

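/*
 * Start link training: program the chosen link rate and lane count on both
 * the transmitter and the sink, reset the per-lane drive settings, wait for
 * the PLL to lock and switch both sides to training pattern 1 (clock
 * recovery) with scrambling disabled.
 */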
static int exynos_dp_link_start(struct exynos_dp_device *dp)
{
	u8 buf[4];
	int lane, lane_count, pll_tries, retval;

	lane_count = dp->link_train.lane_count;

	dp->link_train.lt_state = CLOCK_RECOVERY;
	dp->link_train.eq_loop = 0;

	for (lane = 0; lane < lane_count; lane++)
		dp->link_train.cr_loop[lane] = 0;

	/* Set link rate and count as you want to establish */
	exynos_dp_set_link_bandwidth(dp, dp->link_train.link_rate);
	exynos_dp_set_lane_count(dp, dp->link_train.lane_count);

	/* Setup RX configuration */
	buf[0] = dp->link_train.link_rate;
	buf[1] = dp->link_train.lane_count;
	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_LINK_BW_SET,
				2, buf);
	if (retval)
		return retval;

	/* Set TX pre-emphasis to minimum */
	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_lane_pre_emphasis(dp,
			PRE_EMPHASIS_LEVEL_0, lane);

	/* Wait for PLL lock */
	pll_tries = 0;
	while (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
		if (pll_tries == DP_TIMEOUT_LOOP_COUNT) {
			dev_err(dp->dev, "Wait for PLL lock timed out\n");
			return -ETIMEDOUT;
		}

		pll_tries++;
		usleep_range(90, 120);
	}

	/* Set training pattern 1 */
	exynos_dp_set_training_pattern(dp, TRAINING_PTN1);

	/* Set RX training pattern */
	retval = exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			DP_LINK_SCRAMBLING_DISABLE | DP_TRAINING_PATTERN_1);
	if (retval)
		return retval;

	for (lane = 0; lane < lane_count; lane++)
		buf[lane] = DP_TRAIN_PRE_EMPH_LEVEL_0 |
			    DP_TRAIN_VOLTAGE_SWING_LEVEL_0;

	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_TRAINING_LANE0_SET,
			lane_count, buf);

	return retval;
}

static unsigned char exynos_dp_get_lane_status(u8 link_status[2], int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = link_status[lane >> 1];

	return (link_value >> shift) & 0xf;
}

static int exynos_dp_clock_recovery_ok(u8 link_status[2], int lane_count)
{
	int lane;
	u8 lane_status;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = exynos_dp_get_lane_status(link_status, lane);
		if ((lane_status & DP_LANE_CR_DONE) == 0)
			return -EINVAL;
	}
	return 0;
}

static int exynos_dp_channel_eq_ok(u8 link_status[2], u8 link_align,
				int lane_count)
{
	int lane;
	u8 lane_status;

	if ((link_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return -EINVAL;

	for (lane = 0; lane < lane_count; lane++) {
		lane_status = exynos_dp_get_lane_status(link_status, lane);
		lane_status &= DP_CHANNEL_EQ_BITS;
		if (lane_status != DP_CHANNEL_EQ_BITS)
			return -EINVAL;
	}

	return 0;
}

static unsigned char exynos_dp_get_adjust_request_voltage(u8 adjust_request[2],
							int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = adjust_request[lane >> 1];

	return (link_value >> shift) & 0x3;
}

static unsigned char exynos_dp_get_adjust_request_pre_emphasis(
					u8 adjust_request[2],
					int lane)
{
	int shift = (lane & 1) * 4;
	u8 link_value = adjust_request[lane >> 1];

	return ((link_value >> shift) & 0xc) >> 2;
}

static void exynos_dp_set_lane_link_training(struct exynos_dp_device *dp,
					u8 training_lane_set, int lane)
{
	switch (lane) {
	case 0:
		exynos_dp_set_lane0_link_training(dp, training_lane_set);
		break;
	case 1:
		exynos_dp_set_lane1_link_training(dp, training_lane_set);
		break;

	case 2:
		exynos_dp_set_lane2_link_training(dp, training_lane_set);
		break;

	case 3:
		exynos_dp_set_lane3_link_training(dp, training_lane_set);
		break;
	}
}

static unsigned int exynos_dp_get_lane_link_training(
				struct exynos_dp_device *dp,
				int lane)
{
	u32 reg;

	switch (lane) {
	case 0:
		reg = exynos_dp_get_lane0_link_training(dp);
		break;
	case 1:
		reg = exynos_dp_get_lane1_link_training(dp);
		break;
	case 2:
		reg = exynos_dp_get_lane2_link_training(dp);
		break;
	case 3:
		reg = exynos_dp_get_lane3_link_training(dp);
		break;
	default:
		WARN_ON(1);
		return 0;
	}

	return reg;
}

static void exynos_dp_reduce_link_rate(struct exynos_dp_device *dp)
{
	exynos_dp_training_pattern_dis(dp);
	exynos_dp_set_enhanced_mode(dp);

	dp->link_train.lt_state = FAILED;
}

static void exynos_dp_get_adjust_training_lane(struct exynos_dp_device *dp,
					u8 adjust_request[2])
{
	int lane, lane_count;
	u8 voltage_swing, pre_emphasis, training_lane;

	lane_count = dp->link_train.lane_count;
	for (lane = 0; lane < lane_count; lane++) {
		voltage_swing = exynos_dp_get_adjust_request_voltage(
						adjust_request, lane);
		pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
						adjust_request, lane);
		training_lane = DPCD_VOLTAGE_SWING_SET(voltage_swing) |
				DPCD_PRE_EMPHASIS_SET(pre_emphasis);

		if (voltage_swing == VOLTAGE_LEVEL_3)
			training_lane |= DP_TRAIN_MAX_SWING_REACHED;
		if (pre_emphasis == PRE_EMPHASIS_LEVEL_3)
			training_lane |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

		dp->link_train.training_lane[lane] = training_lane;
	}
}

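/*
 * One pass of the clock-recovery phase: read the per-lane status and the
 * sink's requested drive adjustments, move on to training pattern 2 once
 * every lane reports CR_DONE, otherwise re-apply the adjusted voltage
 * swing/pre-emphasis and bail out if the retry or level limits are hit.
 */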
static int exynos_dp_process_clock_recovery(struct exynos_dp_device *dp)
{
	int lane, lane_count, retval;
	u8 voltage_swing, pre_emphasis, training_lane;
	u8 link_status[2], adjust_request[2];

	usleep_range(100, 101);

	lane_count = dp->link_train.lane_count;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_LANE0_1_STATUS, 2, link_status);
	if (retval)
		return retval;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_ADJUST_REQUEST_LANE0_1, 2, adjust_request);
	if (retval)
		return retval;

	if (exynos_dp_clock_recovery_ok(link_status, lane_count) == 0) {
		/* set training pattern 2 for EQ */
		exynos_dp_set_training_pattern(dp, TRAINING_PTN2);

		retval = exynos_dp_write_byte_to_dpcd(dp,
				DP_TRAINING_PATTERN_SET,
				DP_LINK_SCRAMBLING_DISABLE |
				DP_TRAINING_PATTERN_2);
		if (retval)
			return retval;

		dev_info(dp->dev, "Link Training Clock Recovery success\n");
		dp->link_train.lt_state = EQUALIZER_TRAINING;
	} else {
		for (lane = 0; lane < lane_count; lane++) {
			training_lane = exynos_dp_get_lane_link_training(
							dp, lane);
			voltage_swing = exynos_dp_get_adjust_request_voltage(
							adjust_request, lane);
			pre_emphasis = exynos_dp_get_adjust_request_pre_emphasis(
							adjust_request, lane);

			if (DPCD_VOLTAGE_SWING_GET(training_lane) ==
					voltage_swing &&
			    DPCD_PRE_EMPHASIS_GET(training_lane) ==
					pre_emphasis)
				dp->link_train.cr_loop[lane]++;

			if (dp->link_train.cr_loop[lane] == MAX_CR_LOOP ||
			    voltage_swing == VOLTAGE_LEVEL_3 ||
			    pre_emphasis == PRE_EMPHASIS_LEVEL_3) {
				dev_err(dp->dev, "CR Max reached (%d,%d,%d)\n",
					dp->link_train.cr_loop[lane],
					voltage_swing, pre_emphasis);
				exynos_dp_reduce_link_rate(dp);
				return -EIO;
			}
		}
	}

	exynos_dp_get_adjust_training_lane(dp, adjust_request);

	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_link_training(dp,
			dp->link_train.training_lane[lane], lane);

	retval = exynos_dp_write_bytes_to_dpcd(dp,
			DP_TRAINING_LANE0_SET, lane_count,
			dp->link_train.training_lane);
	if (retval)
		return retval;

	return retval;
}

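/*
 * One pass of the channel-equalization phase: verify clock recovery still
 * holds, check interlane alignment and the per-lane EQ/symbol-lock bits,
 * and either finish training (recording the final link rate and lane
 * count) or re-apply the requested drive settings until MAX_EQ_LOOP.
 */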
static int exynos_dp_process_equalizer_training(struct exynos_dp_device *dp)
{
	int lane, lane_count, retval;
	u32 reg;
	u8 link_align, link_status[2], adjust_request[2];

	usleep_range(400, 401);

	lane_count = dp->link_train.lane_count;

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_LANE0_1_STATUS, 2, link_status);
	if (retval)
		return retval;

	if (exynos_dp_clock_recovery_ok(link_status, lane_count)) {
		exynos_dp_reduce_link_rate(dp);
		return -EIO;
	}

	retval = exynos_dp_read_bytes_from_dpcd(dp,
			DP_ADJUST_REQUEST_LANE0_1, 2, adjust_request);
	if (retval)
		return retval;

	retval = exynos_dp_read_byte_from_dpcd(dp,
			DP_LANE_ALIGN_STATUS_UPDATED, &link_align);
	if (retval)
		return retval;

	exynos_dp_get_adjust_training_lane(dp, adjust_request);

	if (!exynos_dp_channel_eq_ok(link_status, link_align, lane_count)) {
		/* training pattern set to normal */
		exynos_dp_training_pattern_dis(dp);

		dev_info(dp->dev, "Link Training success!\n");

		exynos_dp_get_link_bandwidth(dp, &reg);
		dp->link_train.link_rate = reg;
		dev_dbg(dp->dev, "final bandwidth = %.2x\n",
			dp->link_train.link_rate);

		exynos_dp_get_lane_count(dp, &reg);
		dp->link_train.lane_count = reg;
		dev_dbg(dp->dev, "final lane count = %.2x\n",
			dp->link_train.lane_count);

		/* set enhanced mode if available */
		exynos_dp_set_enhanced_mode(dp);
		dp->link_train.lt_state = FINISHED;

		return 0;
	}

	/* not all locked */
	dp->link_train.eq_loop++;

	if (dp->link_train.eq_loop > MAX_EQ_LOOP) {
		dev_err(dp->dev, "EQ Max loop\n");
		exynos_dp_reduce_link_rate(dp);
		return -EIO;
	}

	for (lane = 0; lane < lane_count; lane++)
		exynos_dp_set_lane_link_training(dp,
			dp->link_train.training_lane[lane], lane);

	retval = exynos_dp_write_bytes_to_dpcd(dp, DP_TRAINING_LANE0_SET,
			lane_count, dp->link_train.training_lane);

	return retval;
}

static void exynos_dp_get_max_rx_bandwidth(struct exynos_dp_device *dp,
					u8 *bandwidth)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum link rate of Main Link lanes
	 * 0x06 = 1.62 Gbps, 0x0a = 2.7 Gbps
	 */
	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LINK_RATE, &data);
	*bandwidth = data;
}

static void exynos_dp_get_max_rx_lane_count(struct exynos_dp_device *dp,
					u8 *lane_count)
{
	u8 data;

	/*
	 * For DP rev.1.1, Maximum number of Main Link lanes
	 * 0x01 = 1 lane, 0x02 = 2 lanes, 0x04 = 4 lanes
	 */
	exynos_dp_read_byte_from_dpcd(dp, DP_MAX_LANE_COUNT, &data);
	*lane_count = DPCD_MAX_LANE_COUNT(data);
}

static void exynos_dp_init_training(struct exynos_dp_device *dp,
			enum link_lane_count_type max_lane,
			enum link_rate_type max_rate)
{
	/*
	 * MACRO_RST must be applied after the PLL_LOCK to avoid
	 * the DP inter pair skew issue for at least 10 us
	 */
	exynos_dp_reset_macro(dp);

	/* Initialize by reading RX's DPCD */
	exynos_dp_get_max_rx_bandwidth(dp, &dp->link_train.link_rate);
	exynos_dp_get_max_rx_lane_count(dp, &dp->link_train.lane_count);

	if ((dp->link_train.link_rate != LINK_RATE_1_62GBPS) &&
	    (dp->link_train.link_rate != LINK_RATE_2_70GBPS)) {
		dev_err(dp->dev, "Rx Max Link Rate is abnormal :%x !\n",
			dp->link_train.link_rate);
		dp->link_train.link_rate = LINK_RATE_1_62GBPS;
	}

	if (dp->link_train.lane_count == 0) {
		dev_err(dp->dev, "Rx Max Lane count is abnormal :%x !\n",
			dp->link_train.lane_count);
		dp->link_train.lane_count = (u8)LANE_COUNT1;
	}

	/* Setup TX lane count & rate */
	if (dp->link_train.lane_count > max_lane)
		dp->link_train.lane_count = max_lane;
	if (dp->link_train.link_rate > max_rate)
		dp->link_train.link_rate = max_rate;

	/* All DP analog module power up */
	exynos_dp_set_analog_power_down(dp, POWER_ALL, 0);
}

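/*
 * Software link-training state machine: START -> CLOCK_RECOVERY ->
 * EQUALIZER_TRAINING -> FINISHED, with FAILED reached whenever the current
 * phase gives up and reduces the link rate.
 */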
static int exynos_dp_sw_link_training(struct exynos_dp_device *dp)
{
	int retval = 0, training_finished = 0;

	dp->link_train.lt_state = START;

	/* Process here */
	while (!retval && !training_finished) {
		switch (dp->link_train.lt_state) {
		case START:
			retval = exynos_dp_link_start(dp);
			if (retval)
				dev_err(dp->dev, "LT link start failed!\n");
			break;
		case CLOCK_RECOVERY:
			retval = exynos_dp_process_clock_recovery(dp);
			if (retval)
				dev_err(dp->dev, "LT CR failed!\n");
			break;
		case EQUALIZER_TRAINING:
			retval = exynos_dp_process_equalizer_training(dp);
			if (retval)
				dev_err(dp->dev, "LT EQ failed!\n");
			break;
		case FINISHED:
			training_finished = 1;
			break;
		case FAILED:
			return -EREMOTEIO;
		}
	}
	if (retval)
		dev_err(dp->dev, "eDP link training failed (%d)\n", retval);

	return retval;
}

static int exynos_dp_set_link_train(struct exynos_dp_device *dp,
				u32 count,
				u32 bwtype)
{
	int i;
	int retval;

	for (i = 0; i < DP_TIMEOUT_LOOP_COUNT; i++) {
		exynos_dp_init_training(dp, count, bwtype);
		retval = exynos_dp_sw_link_training(dp);
		if (retval == 0)
			break;

		usleep_range(100, 110);
	}

	return retval;
}

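/*
 * Configure the video path once the link is trained: switch to slave
 * (capture) video mode, wait for the stream clock, select the register
 * calculated M/N values, unmute and then poll until a stable video stream
 * is detected.
 */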
static int exynos_dp_config_video(struct exynos_dp_device *dp)
{
	int retval = 0;
	int timeout_loop = 0;
	int done_count = 0;

	exynos_dp_config_video_slave_mode(dp);

	exynos_dp_set_video_color_format(dp);

	if (exynos_dp_get_pll_lock_status(dp) == PLL_UNLOCKED) {
		dev_err(dp->dev, "PLL is not locked yet.\n");
		return -EINVAL;
	}

	for (;;) {
		timeout_loop++;
		if (exynos_dp_is_slave_video_stream_clock_on(dp) == 0)
			break;
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "Timeout of video streamclk ok\n");
			return -ETIMEDOUT;
		}

		usleep_range(1, 2);
	}

	/* Set to use the register calculated M/N video */
	exynos_dp_set_video_cr_mn(dp, CALCULATED_M, 0, 0);

	/* For video bist, Video timing must be generated by register */
	exynos_dp_set_video_timing_mode(dp, VIDEO_TIMING_FROM_CAPTURE);

	/* Disable video mute */
	exynos_dp_enable_video_mute(dp, 0);

	/* Configure video slave mode */
	exynos_dp_enable_video_master(dp, 0);

	timeout_loop = 0;

	for (;;) {
		timeout_loop++;
		if (exynos_dp_is_video_stream_on(dp) == 0) {
			done_count++;
			if (done_count > 10)
				break;
		} else if (done_count) {
			done_count = 0;
		}
		if (DP_TIMEOUT_LOOP_COUNT < timeout_loop) {
			dev_err(dp->dev, "Timeout of video streamclk ok\n");
			return -ETIMEDOUT;
		}

		usleep_range(1000, 1001);
	}

	if (retval != 0)
		dev_err(dp->dev, "Video stream is not detected!\n");

	return retval;
}

static void exynos_dp_enable_scramble(struct exynos_dp_device *dp, bool enable)
{
	u8 data;

	if (enable) {
		exynos_dp_enable_scrambling(dp);

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			&data);
		exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			(u8)(data & ~DP_LINK_SCRAMBLING_DISABLE));
	} else {
		exynos_dp_disable_scrambling(dp);

		exynos_dp_read_byte_from_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			&data);
		exynos_dp_write_byte_to_dpcd(dp,
			DP_TRAINING_PATTERN_SET,
			(u8)(data | DP_LINK_SCRAMBLING_DISABLE));
	}
}

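/*
 * Hotplug interrupt handling: cable-in events schedule the hotplug work,
 * which forwards the event to DRM; cable-out and plain HPD-change events
 * only clear the interrupt status.
 */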
static irqreturn_t exynos_dp_irq_handler(int irq, void *arg)
{
	struct exynos_dp_device *dp = arg;

	enum dp_irq_type irq_type;

	irq_type = exynos_dp_get_irq_type(dp);
	switch (irq_type) {
	case DP_IRQ_TYPE_HP_CABLE_IN:
		dev_dbg(dp->dev, "Received irq - cable in\n");
		schedule_work(&dp->hotplug_work);
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	case DP_IRQ_TYPE_HP_CABLE_OUT:
		dev_dbg(dp->dev, "Received irq - cable out\n");
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	case DP_IRQ_TYPE_HP_CHANGE:
		/*
		 * We get these change notifications once in a while, but there
		 * is nothing we can do with them. Just ignore it for now and
		 * only handle cable changes.
		 */
		dev_dbg(dp->dev, "Received irq - hotplug change; ignoring.\n");
		exynos_dp_clear_hotplug_interrupts(dp);
		break;
	default:
		dev_err(dp->dev, "Received irq - unknown type!\n");
		break;
	}
	return IRQ_HANDLED;
}

static void exynos_dp_hotplug(struct work_struct *work)
{
	struct exynos_dp_device *dp;

	dp = container_of(work, struct exynos_dp_device, hotplug_work);

	if (dp->drm_dev)
		drm_helper_hpd_irq_event(dp->drm_dev);
}

static void exynos_dp_commit(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	int ret;

	/* Keep the panel disabled while we configure video */
	if (dp->panel) {
		if (drm_panel_disable(dp->panel))
			DRM_ERROR("failed to disable the panel\n");
	}

	ret = exynos_dp_detect_hpd(dp);
	if (ret) {
		/* Cable has been disconnected, we're done */
		return;
	}

	ret = exynos_dp_handle_edid(dp);
	if (ret) {
		dev_err(dp->dev, "unable to handle edid\n");
		return;
	}

	ret = exynos_dp_set_link_train(dp, dp->video_info->lane_count,
					dp->video_info->link_rate);
	if (ret) {
		dev_err(dp->dev, "unable to do link train\n");
		return;
	}

	exynos_dp_enable_scramble(dp, 1);
	exynos_dp_enable_rx_to_enhanced_mode(dp, 1);
	exynos_dp_enable_enhanced_mode(dp, 1);

	exynos_dp_set_lane_count(dp, dp->video_info->lane_count);
	exynos_dp_set_link_bandwidth(dp, dp->video_info->link_rate);

	exynos_dp_init_video(dp);
	ret = exynos_dp_config_video(dp);
	if (ret)
		dev_err(dp->dev, "unable to config video\n");

	/* Safe to enable the panel now */
	if (dp->panel) {
		if (drm_panel_enable(dp->panel))
			DRM_ERROR("failed to enable the panel\n");
	}

	/* Enable video */
	exynos_dp_start_video(dp);
}

static enum drm_connector_status exynos_dp_detect(
				struct drm_connector *connector, bool force)
{
	return connector_status_connected;
}

static void exynos_dp_connector_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
}

static const struct drm_connector_funcs exynos_dp_connector_funcs = {
	.dpms = drm_atomic_helper_connector_dpms,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.detect = exynos_dp_detect,
	.destroy = exynos_dp_connector_destroy,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};

static int exynos_dp_get_modes(struct drm_connector *connector)
{
	struct exynos_dp_device *dp = ctx_from_connector(connector);
	struct drm_display_mode *mode;

	if (dp->panel)
		return drm_panel_get_modes(dp->panel);

	mode = drm_mode_create(connector->dev);
	if (!mode) {
		DRM_ERROR("failed to create a new display mode.\n");
		return 0;
	}

	drm_display_mode_from_videomode(&dp->vm, mode);
	connector->display_info.width_mm = mode->width_mm;
	connector->display_info.height_mm = mode->height_mm;

	mode->type = DRM_MODE_TYPE_DRIVER | DRM_MODE_TYPE_PREFERRED;
	drm_mode_set_name(mode);
	drm_mode_probed_add(connector, mode);

	return 1;
}

static struct drm_encoder *exynos_dp_best_encoder(
			struct drm_connector *connector)
{
	struct exynos_dp_device *dp = ctx_from_connector(connector);

	return &dp->encoder;
}

static const struct drm_connector_helper_funcs exynos_dp_connector_helper_funcs = {
	.get_modes = exynos_dp_get_modes,
	.best_encoder = exynos_dp_best_encoder,
};

/* Chain the external LCD bridge (dp->ptn_bridge) behind the DP encoder */
static int exynos_drm_attach_lcd_bridge(struct exynos_dp_device *dp,
			struct drm_encoder *encoder)
{
	int ret;

	encoder->bridge->next = dp->ptn_bridge;
	dp->ptn_bridge->encoder = encoder;
	ret = drm_bridge_attach(encoder->dev, dp->ptn_bridge);
	if (ret) {
		DRM_ERROR("Failed to attach bridge to drm\n");
		return ret;
	}

	return 0;
}

static int exynos_dp_bridge_attach(struct drm_bridge *bridge)
{
	struct exynos_dp_device *dp = bridge->driver_private;
	struct drm_encoder *encoder = &dp->encoder;
	struct drm_connector *connector = &dp->connector;
	int ret;

	/* Pre-empt DP connector creation if there's a bridge */
	if (dp->ptn_bridge) {
		ret = exynos_drm_attach_lcd_bridge(dp, encoder);
		if (!ret)
			return 0;
	}

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	ret = drm_connector_init(dp->drm_dev, connector,
			&exynos_dp_connector_funcs, DRM_MODE_CONNECTOR_eDP);
	if (ret) {
		DRM_ERROR("Failed to initialize connector with drm\n");
		return ret;
	}

	drm_connector_helper_add(connector, &exynos_dp_connector_helper_funcs);
	drm_connector_register(connector);
	drm_mode_connector_attach_encoder(connector, encoder);

	if (dp->panel)
		ret = drm_panel_attach(dp->panel, &dp->connector);

	return ret;
}

static void exynos_dp_bridge_enable(struct drm_bridge *bridge)
{
	struct exynos_dp_device *dp = bridge->driver_private;
	struct exynos_drm_crtc *crtc = dp_to_crtc(dp);

	if (dp->dpms_mode == DRM_MODE_DPMS_ON)
		return;

	pm_runtime_get_sync(dp->dev);

	if (dp->panel) {
		if (drm_panel_prepare(dp->panel)) {
			DRM_ERROR("failed to setup the panel\n");
			return;
		}
	}

	if (crtc->ops->clock_enable)
		crtc->ops->clock_enable(dp_to_crtc(dp), true);

	phy_power_on(dp->phy);
	exynos_dp_init_dp(dp);
	enable_irq(dp->irq);
	exynos_dp_commit(&dp->encoder);

	dp->dpms_mode = DRM_MODE_DPMS_ON;
}

static void exynos_dp_bridge_disable(struct drm_bridge *bridge)
{
	struct exynos_dp_device *dp = bridge->driver_private;
	struct exynos_drm_crtc *crtc = dp_to_crtc(dp);

	if (dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (dp->panel) {
		if (drm_panel_disable(dp->panel)) {
			DRM_ERROR("failed to disable the panel\n");
			return;
		}
	}

	disable_irq(dp->irq);
	flush_work(&dp->hotplug_work);
	phy_power_off(dp->phy);

	if (crtc->ops->clock_enable)
		crtc->ops->clock_enable(dp_to_crtc(dp), false);

	if (dp->panel) {
		if (drm_panel_unprepare(dp->panel))
			DRM_ERROR("failed to turn off the panel\n");
	}

	pm_runtime_put_sync(dp->dev);

	dp->dpms_mode = DRM_MODE_DPMS_OFF;
}

static void exynos_dp_bridge_nop(struct drm_bridge *bridge)
{
	/* do nothing */
}

static const struct drm_bridge_funcs exynos_dp_bridge_funcs = {
	.enable = exynos_dp_bridge_enable,
	.disable = exynos_dp_bridge_disable,
	.pre_enable = exynos_dp_bridge_nop,
	.post_disable = exynos_dp_bridge_nop,
	.attach = exynos_dp_bridge_attach,
};

static int exynos_dp_create_connector(struct drm_encoder *encoder)
{
	struct exynos_dp_device *dp = encoder_to_dp(encoder);
	struct drm_device *drm_dev = dp->drm_dev;
	struct drm_bridge *bridge;
	int ret;

	bridge = devm_kzalloc(drm_dev->dev, sizeof(*bridge), GFP_KERNEL);
	if (!bridge) {
		DRM_ERROR("failed to allocate for drm bridge\n");
		return -ENOMEM;
	}

	dp->bridge = bridge;

	encoder->bridge = bridge;
	bridge->driver_private = dp;
	bridge->encoder = encoder;
	bridge->funcs = &exynos_dp_bridge_funcs;

	ret = drm_bridge_attach(drm_dev, bridge);
	if (ret) {
		DRM_ERROR("failed to attach drm bridge\n");
		return -EINVAL;
	}

	return 0;
}

static void exynos_dp_mode_set(struct drm_encoder *encoder,
			       struct drm_display_mode *mode,
			       struct drm_display_mode *adjusted_mode)
{
}

static void exynos_dp_enable(struct drm_encoder *encoder)
{
}

static void exynos_dp_disable(struct drm_encoder *encoder)
{
}

static const struct drm_encoder_helper_funcs exynos_dp_encoder_helper_funcs = {
	.mode_set = exynos_dp_mode_set,
	.enable = exynos_dp_enable,
	.disable = exynos_dp_disable,
};

static const struct drm_encoder_funcs exynos_dp_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};

static struct video_info *exynos_dp_dt_parse_pdata(struct device *dev)
{
	struct device_node *dp_node = dev->of_node;
	struct video_info *dp_video_config;

	dp_video_config = devm_kzalloc(dev,
				sizeof(*dp_video_config), GFP_KERNEL);
	if (!dp_video_config)
		return ERR_PTR(-ENOMEM);

	dp_video_config->h_sync_polarity =
		of_property_read_bool(dp_node, "hsync-active-high");

	dp_video_config->v_sync_polarity =
		of_property_read_bool(dp_node, "vsync-active-high");

	dp_video_config->interlaced =
		of_property_read_bool(dp_node, "interlaced");

	if (of_property_read_u32(dp_node, "samsung,color-space",
				&dp_video_config->color_space)) {
		dev_err(dev, "failed to get color-space\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,dynamic-range",
				&dp_video_config->dynamic_range)) {
		dev_err(dev, "failed to get dynamic-range\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,ycbcr-coeff",
				&dp_video_config->ycbcr_coeff)) {
		dev_err(dev, "failed to get ycbcr-coeff\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,color-depth",
				&dp_video_config->color_depth)) {
		dev_err(dev, "failed to get color-depth\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,link-rate",
				&dp_video_config->link_rate)) {
		dev_err(dev, "failed to get link-rate\n");
		return ERR_PTR(-EINVAL);
	}

	if (of_property_read_u32(dp_node, "samsung,lane-count",
				&dp_video_config->lane_count)) {
		dev_err(dev, "failed to get lane-count\n");
		return ERR_PTR(-EINVAL);
	}

	return dp_video_config;
}

static int exynos_dp_dt_parse_panel(struct exynos_dp_device *dp)
{
	int ret;

	ret = of_get_videomode(dp->dev->of_node, &dp->vm, OF_USE_NATIVE_MODE);
	if (ret) {
		DRM_ERROR("failed: of_get_videomode() : %d\n", ret);
		return ret;
	}
	return 0;
}

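/*
 * Component bind: parse the video configuration from the device tree,
 * acquire the PHY, clock, register space and hotplug IRQ (either a GPIO or
 * the controller interrupt), then register the encoder and its
 * connector/bridge with the DRM device.
 */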
static int exynos_dp_bind(struct device *dev, struct device *master, void *data)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm_dev = data;
	struct drm_encoder *encoder = &dp->encoder;
	struct resource *res;
	unsigned int irq_flags;
	int pipe, ret = 0;

	dp->dev = &pdev->dev;
	dp->dpms_mode = DRM_MODE_DPMS_OFF;

	dp->video_info = exynos_dp_dt_parse_pdata(&pdev->dev);
	if (IS_ERR(dp->video_info))
		return PTR_ERR(dp->video_info);

	dp->phy = devm_phy_get(dp->dev, "dp");
	if (IS_ERR(dp->phy)) {
		dev_err(dp->dev, "no DP phy configured\n");
		ret = PTR_ERR(dp->phy);
		if (ret) {
			/*
			 * phy itself is not enabled, so we can move forward
			 * assigning NULL to phy pointer.
			 */
			if (ret == -ENOSYS || ret == -ENODEV)
				dp->phy = NULL;
			else
				return ret;
		}
	}

	if (!dp->panel && !dp->ptn_bridge) {
		ret = exynos_dp_dt_parse_panel(dp);
		if (ret)
			return ret;
	}

	dp->clock = devm_clk_get(&pdev->dev, "dp");
	if (IS_ERR(dp->clock)) {
		dev_err(&pdev->dev, "failed to get clock\n");
		return PTR_ERR(dp->clock);
	}

	clk_prepare_enable(dp->clock);

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);

	dp->reg_base = devm_ioremap_resource(&pdev->dev, res);
	if (IS_ERR(dp->reg_base))
		return PTR_ERR(dp->reg_base);

	dp->hpd_gpio = of_get_named_gpio(dev->of_node, "samsung,hpd-gpio", 0);

	if (gpio_is_valid(dp->hpd_gpio)) {
		/*
		 * Set up the hotplug GPIO from the device tree as an interrupt.
		 * Simply specifying a different interrupt in the device tree
		 * doesn't work since we handle hotplug rather differently when
		 * using a GPIO.  We also need the actual GPIO specifier so
		 * that we can get the current state of the GPIO.
		 */
		ret = devm_gpio_request_one(&pdev->dev, dp->hpd_gpio, GPIOF_IN,
					    "hpd_gpio");
		if (ret) {
			dev_err(&pdev->dev, "failed to get hpd gpio\n");
			return ret;
		}
		dp->irq = gpio_to_irq(dp->hpd_gpio);
		irq_flags = IRQF_TRIGGER_RISING | IRQF_TRIGGER_FALLING;
	} else {
		dp->hpd_gpio = -ENODEV;
		dp->irq = platform_get_irq(pdev, 0);
		irq_flags = 0;
	}

	if (dp->irq == -ENXIO) {
		dev_err(&pdev->dev, "failed to get irq\n");
		return -ENODEV;
	}

	INIT_WORK(&dp->hotplug_work, exynos_dp_hotplug);

	ret = devm_request_irq(&pdev->dev, dp->irq, exynos_dp_irq_handler,
			irq_flags, "exynos-dp", dp);
	if (ret) {
		dev_err(&pdev->dev, "failed to request irq\n");
		return ret;
	}
	disable_irq(dp->irq);

	dp->drm_dev = drm_dev;

	pipe = exynos_drm_crtc_get_pipe_from_type(drm_dev,
						  EXYNOS_DISPLAY_TYPE_LCD);
	if (pipe < 0)
		return pipe;

	encoder->possible_crtcs = 1 << pipe;

	DRM_DEBUG_KMS("possible_crtcs = 0x%x\n", encoder->possible_crtcs);

	drm_encoder_init(drm_dev, encoder, &exynos_dp_encoder_funcs,
			 DRM_MODE_ENCODER_TMDS, NULL);

	drm_encoder_helper_add(encoder, &exynos_dp_encoder_helper_funcs);

	ret = exynos_dp_create_connector(encoder);
	if (ret) {
		DRM_ERROR("failed to create connector ret = %d\n", ret);
		drm_encoder_cleanup(encoder);
		return ret;
	}

	return 0;
}

static void exynos_dp_unbind(struct device *dev, struct device *master,
				void *data)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);

	exynos_dp_disable(&dp->encoder);
}

static const struct component_ops exynos_dp_ops = {
	.bind = exynos_dp_bind,
	.unbind = exynos_dp_unbind,
};

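/*
 * Probe: locate the attached panel (legacy "panel" phandle) or the
 * panel/bridge behind the OF graph endpoint, deferring until it is
 * available, then enable runtime PM and register the component.
 */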
static int exynos_dp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct device_node *np = NULL, *endpoint = NULL;
	struct exynos_dp_device *dp;
	int ret;

	dp = devm_kzalloc(&pdev->dev, sizeof(struct exynos_dp_device),
				GFP_KERNEL);
	if (!dp)
		return -ENOMEM;

	platform_set_drvdata(pdev, dp);

	/* This is for the backward compatibility. */
	np = of_parse_phandle(dev->of_node, "panel", 0);
	if (np) {
		dp->panel = of_drm_find_panel(np);
		of_node_put(np);
		if (!dp->panel)
			return -EPROBE_DEFER;
		goto out;
	}

	endpoint = of_graph_get_next_endpoint(dev->of_node, NULL);
	if (endpoint) {
		np = of_graph_get_remote_port_parent(endpoint);
		if (np) {
			/* The remote port can be either a panel or a bridge */
			dp->panel = of_drm_find_panel(np);
			if (!dp->panel) {
				dp->ptn_bridge = of_drm_find_bridge(np);
				if (!dp->ptn_bridge) {
					of_node_put(np);
					return -EPROBE_DEFER;
				}
			}
			of_node_put(np);
		} else {
			DRM_ERROR("no remote endpoint device node found.\n");
			return -EINVAL;
		}
	} else {
		DRM_ERROR("no port endpoint subnode found.\n");
		return -EINVAL;
	}

out:
	pm_runtime_enable(dev);

	ret = component_add(&pdev->dev, &exynos_dp_ops);
	if (ret)
		goto err_disable_pm_runtime;

	return ret;

err_disable_pm_runtime:
	pm_runtime_disable(dev);

	return ret;
}

static int exynos_dp_remove(struct platform_device *pdev)
{
	pm_runtime_disable(&pdev->dev);
	component_del(&pdev->dev, &exynos_dp_ops);

	return 0;
}

#ifdef CONFIG_PM
static int exynos_dp_suspend(struct device *dev)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);

	clk_disable_unprepare(dp->clock);

	return 0;
}

static int exynos_dp_resume(struct device *dev)
{
	struct exynos_dp_device *dp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(dp->clock);
	if (ret < 0) {
		DRM_ERROR("Failed to prepare_enable the clock [%d]\n", ret);
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops exynos_dp_pm_ops = {
	SET_RUNTIME_PM_OPS(exynos_dp_suspend, exynos_dp_resume, NULL)
};

static const struct of_device_id exynos_dp_match[] = {
	{ .compatible = "samsung,exynos5-dp" },
	{},
};
MODULE_DEVICE_TABLE(of, exynos_dp_match);

struct platform_driver dp_driver = {
	.probe = exynos_dp_probe,
	.remove = exynos_dp_remove,
	.driver = {
		.name = "exynos-dp",
		.owner = THIS_MODULE,
		.pm = &exynos_dp_pm_ops,
		.of_match_table = exynos_dp_match,
	},
};

MODULE_AUTHOR("Jingoo Han <jg1.han@samsung.com>");
MODULE_DESCRIPTION("Samsung SoC DP Driver");
MODULE_LICENSE("GPL v2");