1
2
3
4
5
6
7#include <drm/drmP.h>
8#include <drm/drm_atomic_helper.h>
9#include <drm/drm_dp_helper.h>
10#include <drm/drm_edid.h>
11#include <drm/drm_of.h>
12#include <drm/drm_probe_helper.h>
13
14#include <linux/clk.h>
15#include <linux/component.h>
16#include <linux/extcon.h>
17#include <linux/firmware.h>
18#include <linux/regmap.h>
19#include <linux/reset.h>
20#include <linux/mfd/syscon.h>
21#include <linux/phy/phy.h>
22
23#include <sound/hdmi-codec.h>
24
25#include "cdn-dp-core.h"
26#include "cdn-dp-reg.h"
27#include "rockchip_drm_vop.h"
28
29#define connector_to_dp(c) \
30 container_of(c, struct cdn_dp_device, connector)
31
32#define encoder_to_dp(c) \
33 container_of(c, struct cdn_dp_device, encoder)
34
35#define GRF_SOC_CON9 0x6224
36#define DP_SEL_VOP_LIT BIT(12)
37#define GRF_SOC_CON26 0x6268
38#define DPTX_HPD_SEL (3 << 12)
39#define DPTX_HPD_DEL (2 << 12)
40#define DPTX_HPD_SEL_MASK (3 << 28)
41
42#define CDN_FW_TIMEOUT_MS (64 * 1000)
43#define CDN_DPCD_TIMEOUT_MS 5000
44#define CDN_DP_FIRMWARE "rockchip/dptx.bin"
45
/* Per-SoC configuration, selected through the OF match table below. */
struct cdn_dp_data {
	u8 max_phy;	/* number of extcon/PHY pairs to probe for this SoC */
};
49
/*
 * RK3399 has two Type-C ports that can carry DP.
 * NOTE(review): this is only referenced via cdn_dp_dt_ids in this file;
 * it could likely be static const — confirm no external users.
 */
struct cdn_dp_data rk3399_cdn_dp = {
	.max_phy = 2,
};
53
/* Device-tree match table; .data points at the per-SoC config. */
static const struct of_device_id cdn_dp_dt_ids[] = {
	{ .compatible = "rockchip,rk3399-cdn-dp",
		.data = (void *)&rk3399_cdn_dp },
	{}
};
59
60MODULE_DEVICE_TABLE(of, cdn_dp_dt_ids);
61
62static int cdn_dp_grf_write(struct cdn_dp_device *dp,
63 unsigned int reg, unsigned int val)
64{
65 int ret;
66
67 ret = clk_prepare_enable(dp->grf_clk);
68 if (ret) {
69 DRM_DEV_ERROR(dp->dev, "Failed to prepare_enable grf clock\n");
70 return ret;
71 }
72
73 ret = regmap_write(dp->grf, reg, val);
74 if (ret) {
75 DRM_DEV_ERROR(dp->dev, "Could not write to GRF: %d\n", ret);
76 return ret;
77 }
78
79 clk_disable_unprepare(dp->grf_clk);
80
81 return 0;
82}
83
84static int cdn_dp_clk_enable(struct cdn_dp_device *dp)
85{
86 int ret;
87 unsigned long rate;
88
89 ret = clk_prepare_enable(dp->pclk);
90 if (ret < 0) {
91 DRM_DEV_ERROR(dp->dev, "cannot enable dp pclk %d\n", ret);
92 goto err_pclk;
93 }
94
95 ret = clk_prepare_enable(dp->core_clk);
96 if (ret < 0) {
97 DRM_DEV_ERROR(dp->dev, "cannot enable core_clk %d\n", ret);
98 goto err_core_clk;
99 }
100
101 ret = pm_runtime_get_sync(dp->dev);
102 if (ret < 0) {
103 DRM_DEV_ERROR(dp->dev, "cannot get pm runtime %d\n", ret);
104 goto err_pm_runtime_get;
105 }
106
107 reset_control_assert(dp->core_rst);
108 reset_control_assert(dp->dptx_rst);
109 reset_control_assert(dp->apb_rst);
110 reset_control_deassert(dp->core_rst);
111 reset_control_deassert(dp->dptx_rst);
112 reset_control_deassert(dp->apb_rst);
113
114 rate = clk_get_rate(dp->core_clk);
115 if (!rate) {
116 DRM_DEV_ERROR(dp->dev, "get clk rate failed\n");
117 ret = -EINVAL;
118 goto err_set_rate;
119 }
120
121 cdn_dp_set_fw_clk(dp, rate);
122 cdn_dp_clock_reset(dp);
123
124 return 0;
125
126err_set_rate:
127 pm_runtime_put(dp->dev);
128err_pm_runtime_get:
129 clk_disable_unprepare(dp->core_clk);
130err_core_clk:
131 clk_disable_unprepare(dp->pclk);
132err_pclk:
133 return ret;
134}
135
/* Undo cdn_dp_clk_enable(): drop the runtime-PM reference and the clocks. */
static void cdn_dp_clk_disable(struct cdn_dp_device *dp)
{
	pm_runtime_put_sync(dp->dev);
	clk_disable_unprepare(dp->pclk);
	clk_disable_unprepare(dp->core_clk);
}
142
143static int cdn_dp_get_port_lanes(struct cdn_dp_port *port)
144{
145 struct extcon_dev *edev = port->extcon;
146 union extcon_property_value property;
147 int dptx;
148 u8 lanes;
149
150 dptx = extcon_get_state(edev, EXTCON_DISP_DP);
151 if (dptx > 0) {
152 extcon_get_property(edev, EXTCON_DISP_DP,
153 EXTCON_PROP_USB_SS, &property);
154 if (property.intval)
155 lanes = 2;
156 else
157 lanes = 4;
158 } else {
159 lanes = 0;
160 }
161
162 return lanes;
163}
164
165static int cdn_dp_get_sink_count(struct cdn_dp_device *dp, u8 *sink_count)
166{
167 int ret;
168 u8 value;
169
170 *sink_count = 0;
171 ret = cdn_dp_dpcd_read(dp, DP_SINK_COUNT, &value, 1);
172 if (ret)
173 return ret;
174
175 *sink_count = DP_GET_SINK_COUNT(value);
176 return 0;
177}
178
179static struct cdn_dp_port *cdn_dp_connected_port(struct cdn_dp_device *dp)
180{
181 struct cdn_dp_port *port;
182 int i, lanes;
183
184 for (i = 0; i < dp->ports; i++) {
185 port = dp->port[i];
186 lanes = cdn_dp_get_port_lanes(port);
187 if (lanes)
188 return port;
189 }
190 return NULL;
191}
192
/*
 * Poll the sink over DPCD until it reports at least one attached sink,
 * for up to CDN_DPCD_TIMEOUT_MS. Returns true when a sink is present,
 * false on unplug, bad active_port, or timeout.
 */
static bool cdn_dp_check_sink_connection(struct cdn_dp_device *dp)
{
	unsigned long timeout = jiffies + msecs_to_jiffies(CDN_DPCD_TIMEOUT_MS);
	struct cdn_dp_port *port;
	u8 sink_count = 0;

	if (dp->active_port < 0 || dp->active_port >= dp->ports) {
		DRM_DEV_ERROR(dp->dev, "active_port is wrong!\n");
		return false;
	}

	port = dp->port[dp->active_port];

	/*
	 * The DPCD read can fail transiently (e.g. while the sink is still
	 * coming up), so retry with a short sleep until the deadline,
	 * bailing out early if the cable goes away.
	 */
	while (time_before(jiffies, timeout)) {
		if (!extcon_get_state(port->extcon, EXTCON_DISP_DP))
			return false;

		if (!cdn_dp_get_sink_count(dp, &sink_count))
			return sink_count ? true : false;

		usleep_range(5000, 10000);
	}

	DRM_DEV_ERROR(dp->dev, "Get sink capability timed out\n");
	return false;
}
225
226static enum drm_connector_status
227cdn_dp_connector_detect(struct drm_connector *connector, bool force)
228{
229 struct cdn_dp_device *dp = connector_to_dp(connector);
230 enum drm_connector_status status = connector_status_disconnected;
231
232 mutex_lock(&dp->lock);
233 if (dp->connected)
234 status = connector_status_connected;
235 mutex_unlock(&dp->lock);
236
237 return status;
238}
239
/* DRM .destroy hook: unregister from userspace, then free DRM state. */
static void cdn_dp_connector_destroy(struct drm_connector *connector)
{
	drm_connector_unregister(connector);
	drm_connector_cleanup(connector);
}
245
/* Connector ops; atomic state handling uses the stock helpers. */
static const struct drm_connector_funcs cdn_dp_atomic_connector_funcs = {
	.detect = cdn_dp_connector_detect,
	.destroy = cdn_dp_connector_destroy,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.reset = drm_atomic_helper_connector_reset,
	.atomic_duplicate_state = drm_atomic_helper_connector_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
};
254
/*
 * DRM .get_modes hook: populate modes from the cached EDID (read by the
 * event worker) and note whether the sink supports audio.
 * Returns the number of modes added (0 when no EDID is cached).
 */
static int cdn_dp_connector_get_modes(struct drm_connector *connector)
{
	struct cdn_dp_device *dp = connector_to_dp(connector);
	struct edid *edid;
	int ret = 0;

	/* dp->edid is replaced/freed under dp->lock by the worker. */
	mutex_lock(&dp->lock);
	edid = dp->edid;
	if (edid) {
		DRM_DEV_DEBUG_KMS(dp->dev, "got edid: width[%d] x height[%d]\n",
				  edid->width_cm, edid->height_cm);

		dp->sink_has_audio = drm_detect_monitor_audio(edid);
		ret = drm_add_edid_modes(connector, edid);
		if (ret)
			drm_connector_update_edid_property(connector,
							   edid);
	}
	mutex_unlock(&dp->lock);

	return ret;
}
277
/*
 * DRM .mode_valid hook: reject modes whose payload bandwidth exceeds what
 * the negotiated link (lanes x rate, after coding overhead) can carry.
 */
static int cdn_dp_connector_mode_valid(struct drm_connector *connector,
				       struct drm_display_mode *mode)
{
	struct cdn_dp_device *dp = connector_to_dp(connector);
	struct drm_display_info *display_info = &dp->connector.display_info;
	u32 requested, actual, rate, sink_max, source_max = 0;
	u8 lanes, bpc;

	/* Without a sink there is nothing any mode could drive. */
	if (!dp->connected)
		return MODE_BAD;

	/* Clamp unusual bpc values to the supported 6/8/10 set. */
	switch (display_info->bpc) {
	case 10:
		bpc = 10;
		break;
	case 6:
		bpc = 6;
		break;
	default:
		bpc = 8;
		break;
	}

	/* mode->clock is in kHz; x3 components x bpc -> Mbit/s. */
	requested = mode->clock * bpc * 3 / 1000;

	/* Usable lane count is the min of source and sink capability. */
	source_max = dp->lanes;
	sink_max = drm_dp_max_lane_count(dp->dpcd);
	lanes = min(source_max, sink_max);

	/* Same for the link rate. */
	source_max = drm_dp_bw_code_to_link_rate(CDN_DP_MAX_LINK_RATE);
	sink_max = drm_dp_max_link_rate(dp->dpcd);
	rate = min(source_max, sink_max);

	/* rate is in kHz of 10-bit symbols: x10/1000 -> /100 gives Mbit/s. */
	actual = rate * lanes / 100;

	/* Assume ~0.8 link efficiency for coding/packing overhead. */
	actual = actual * 8 / 10;

	if (requested > actual) {
		DRM_DEV_DEBUG_KMS(dp->dev,
				  "requested=%d, actual=%d, clock=%d\n",
				  requested, actual, mode->clock);
		return MODE_CLOCK_HIGH;
	}

	return MODE_OK;
}
326
/* Connector probe helpers (mode enumeration and validation). */
static struct drm_connector_helper_funcs cdn_dp_connector_helper_funcs = {
	.get_modes = cdn_dp_connector_get_modes,
	.mode_valid = cdn_dp_connector_mode_valid,
};
331
/*
 * Validate the already-requested firmware image, load its IRAM/DRAM
 * sections into the controller, activate the microcontroller and enable
 * event reporting. Returns 0 on success or a negative errno.
 */
static int cdn_dp_firmware_init(struct cdn_dp_device *dp)
{
	int ret;
	const u32 *iram_data, *dram_data;
	const struct firmware *fw = dp->fw;
	const struct cdn_firmware_header *hdr;

	/*
	 * Sanity check: the header's total size must match the blob size.
	 * NOTE(review): size_bytes goes through le32_to_cpu() but
	 * header_size/iram_size/dram_size below are used raw — verify the
	 * header field endianness in cdn-dp-reg.h.
	 */
	hdr = (struct cdn_firmware_header *)fw->data;
	if (fw->size != le32_to_cpu(hdr->size_bytes)) {
		DRM_DEV_ERROR(dp->dev, "firmware is invalid\n");
		return -EINVAL;
	}

	/* IRAM payload follows the header; DRAM payload follows IRAM. */
	iram_data = (const u32 *)(fw->data + hdr->header_size);
	dram_data = (const u32 *)(fw->data + hdr->header_size + hdr->iram_size);

	ret = cdn_dp_load_firmware(dp, iram_data, hdr->iram_size,
				   dram_data, hdr->dram_size);
	if (ret)
		return ret;

	ret = cdn_dp_set_firmware_active(dp, true);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "active ucpu failed: %d\n", ret);
		return ret;
	}

	return cdn_dp_event_config(dp);
}
361
/*
 * Fetch the sink's receiver capabilities (DPCD) and (re-)read its EDID.
 * Returns -ENODEV if no sink is connected, a DPCD read error, or 0.
 * On success dp->edid holds a freshly allocated EDID (may be NULL if the
 * EDID read itself failed — callers must cope with that).
 */
static int cdn_dp_get_sink_capability(struct cdn_dp_device *dp)
{
	int ret;

	if (!cdn_dp_check_sink_connection(dp))
		return -ENODEV;

	ret = cdn_dp_dpcd_read(dp, DP_DPCD_REV, dp->dpcd,
			       DP_RECEIVER_CAP_SIZE);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to get caps %d\n", ret);
		return ret;
	}

	/* Drop any stale EDID before reading the current one. */
	kfree(dp->edid);
	dp->edid = drm_do_get_edid(&dp->connector,
				   cdn_dp_get_edid_block, dp);
	return 0;
}
381
/*
 * Bring up one port: power on its PHY, route HPD to the DP controller via
 * the GRF, confirm HPD is asserted, read the Type-C plug orientation and
 * program the host capabilities. On success dp->active_port tracks @port.
 * On failure the PHY is powered back off and HPD routing is restored.
 */
static int cdn_dp_enable_phy(struct cdn_dp_device *dp, struct cdn_dp_port *port)
{
	union extcon_property_value property;
	int ret;

	if (!port->phy_enabled) {
		ret = phy_power_on(port->phy);
		if (ret) {
			DRM_DEV_ERROR(dp->dev, "phy power on failed: %d\n",
				      ret);
			goto err_phy;
		}
		port->phy_enabled = true;
	}

	/* Route the DP HPD signal to this controller. */
	ret = cdn_dp_grf_write(dp, GRF_SOC_CON26,
			       DPTX_HPD_SEL_MASK | DPTX_HPD_SEL);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to write HPD_SEL %d\n", ret);
		goto err_power_on;
	}

	/* 0 means no HPD (treated as an error), <0 is a firmware error. */
	ret = cdn_dp_get_hpd_status(dp);
	if (ret <= 0) {
		if (!ret)
			DRM_DEV_ERROR(dp->dev, "hpd does not exist\n");
		goto err_power_on;
	}

	/* Plug orientation decides lane mapping inside the PHY. */
	ret = extcon_get_property(port->extcon, EXTCON_DISP_DP,
				  EXTCON_PROP_USB_TYPEC_POLARITY, &property);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "get property failed\n");
		goto err_power_on;
	}

	port->lanes = cdn_dp_get_port_lanes(port);
	ret = cdn_dp_set_host_cap(dp, port->lanes, property.intval);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "set host capabilities failed: %d\n",
			      ret);
		goto err_power_on;
	}

	dp->active_port = port->id;
	return 0;

err_power_on:
	/*
	 * NOTE(review): on phy_power_off() failure this logs the outer
	 * "ret", not the power-off error — confirm whether that is intended.
	 */
	if (phy_power_off(port->phy))
		DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret);
	else
		port->phy_enabled = false;

err_phy:
	/* Restore HPD routing away from the DP controller. */
	cdn_dp_grf_write(dp, GRF_SOC_CON26,
			 DPTX_HPD_SEL_MASK | DPTX_HPD_DEL);
	return ret;
}
440
441static int cdn_dp_disable_phy(struct cdn_dp_device *dp,
442 struct cdn_dp_port *port)
443{
444 int ret;
445
446 if (port->phy_enabled) {
447 ret = phy_power_off(port->phy);
448 if (ret) {
449 DRM_DEV_ERROR(dp->dev, "phy power off failed: %d", ret);
450 return ret;
451 }
452 }
453
454 port->phy_enabled = false;
455 port->lanes = 0;
456 dp->active_port = -1;
457 return 0;
458}
459
/*
 * Tear the controller down: power off every PHY, restore HPD routing,
 * stop the firmware, drop clocks/PM and reset link bookkeeping.
 * The cached EDID is only freed when the sink is gone, so a still-connected
 * sink keeps its modes across a disable/enable cycle.
 */
static int cdn_dp_disable(struct cdn_dp_device *dp)
{
	int ret, i;

	if (!dp->active)
		return 0;

	for (i = 0; i < dp->ports; i++)
		cdn_dp_disable_phy(dp, dp->port[i]);

	ret = cdn_dp_grf_write(dp, GRF_SOC_CON26,
			       DPTX_HPD_SEL_MASK | DPTX_HPD_DEL);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to clear hpd sel %d\n",
			      ret);
		return ret;
	}

	cdn_dp_set_firmware_active(dp, false);
	cdn_dp_clk_disable(dp);
	dp->active = false;
	dp->link.rate = 0;
	dp->link.num_lanes = 0;
	if (!dp->connected) {
		kfree(dp->edid);
		dp->edid = NULL;
	}

	return 0;
}
490
491static int cdn_dp_enable(struct cdn_dp_device *dp)
492{
493 int ret, i, lanes;
494 struct cdn_dp_port *port;
495
496 port = cdn_dp_connected_port(dp);
497 if (!port) {
498 DRM_DEV_ERROR(dp->dev,
499 "Can't enable without connection\n");
500 return -ENODEV;
501 }
502
503 if (dp->active)
504 return 0;
505
506 ret = cdn_dp_clk_enable(dp);
507 if (ret)
508 return ret;
509
510 ret = cdn_dp_firmware_init(dp);
511 if (ret) {
512 DRM_DEV_ERROR(dp->dev, "firmware init failed: %d", ret);
513 goto err_clk_disable;
514 }
515
516
517 for (i = port->id; i < dp->ports; i++) {
518 port = dp->port[i];
519 lanes = cdn_dp_get_port_lanes(port);
520 if (lanes) {
521 ret = cdn_dp_enable_phy(dp, port);
522 if (ret)
523 continue;
524
525 ret = cdn_dp_get_sink_capability(dp);
526 if (ret) {
527 cdn_dp_disable_phy(dp, port);
528 } else {
529 dp->active = true;
530 dp->lanes = port->lanes;
531 return 0;
532 }
533 }
534 }
535
536err_clk_disable:
537 cdn_dp_clk_disable(dp);
538 return ret;
539}
540
/*
 * DRM .mode_set hook: capture the video parameters (color depth, format,
 * sync polarities) and stash the adjusted mode for later configuration.
 */
static void cdn_dp_encoder_mode_set(struct drm_encoder *encoder,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted)
{
	struct cdn_dp_device *dp = encoder_to_dp(encoder);
	struct drm_display_info *display_info = &dp->connector.display_info;
	struct video_info *video = &dp->video_info;

	/* Mirror the 6/8/10 bpc clamping used in mode_valid. */
	switch (display_info->bpc) {
	case 10:
		video->color_depth = 10;
		break;
	case 6:
		video->color_depth = 6;
		break;
	default:
		video->color_depth = 8;
		break;
	}

	video->color_fmt = PXL_RGB;
	video->v_sync_polarity = !!(mode->flags & DRM_MODE_FLAG_NVSYNC);
	video->h_sync_polarity = !!(mode->flags & DRM_MODE_FLAG_NHSYNC);

	/* Keep the adjusted mode; the worker reconfigures video from it. */
	memcpy(&dp->mode, adjusted, sizeof(*mode));
}
567
568static bool cdn_dp_check_link_status(struct cdn_dp_device *dp)
569{
570 u8 link_status[DP_LINK_STATUS_SIZE];
571 struct cdn_dp_port *port = cdn_dp_connected_port(dp);
572 u8 sink_lanes = drm_dp_max_lane_count(dp->dpcd);
573
574 if (!port || !dp->link.rate || !dp->link.num_lanes)
575 return false;
576
577 if (cdn_dp_dpcd_read(dp, DP_LANE0_1_STATUS, link_status,
578 DP_LINK_STATUS_SIZE)) {
579 DRM_ERROR("Failed to get link status\n");
580 return false;
581 }
582
583
584 return drm_dp_channel_eq_ok(link_status, min(port->lanes, sink_lanes));
585}
586
/*
 * DRM .enable hook: select the source VOP in the GRF, power up the
 * controller, (re)train the link if needed, then configure and start the
 * video stream. Errors are logged; the hook itself cannot fail.
 */
static void cdn_dp_encoder_enable(struct drm_encoder *encoder)
{
	struct cdn_dp_device *dp = encoder_to_dp(encoder);
	int ret, val;

	/* Endpoint id tells us which VOP (big/little) drives this encoder. */
	ret = drm_of_encoder_active_endpoint_id(dp->dev->of_node, encoder);
	if (ret < 0) {
		DRM_DEV_ERROR(dp->dev, "Could not get vop id, %d", ret);
		return;
	}

	DRM_DEV_DEBUG_KMS(dp->dev, "vop %s output to cdn-dp\n",
			  (ret) ? "LIT" : "BIG");
	/* High 16 bits are the GRF write-enable mask for the low bits. */
	if (ret)
		val = DP_SEL_VOP_LIT | (DP_SEL_VOP_LIT << 16);
	else
		val = DP_SEL_VOP_LIT << 16;

	ret = cdn_dp_grf_write(dp, GRF_SOC_CON9, val);
	if (ret)
		return;

	mutex_lock(&dp->lock);

	ret = cdn_dp_enable(dp);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to enable encoder %d\n",
			      ret);
		goto out;
	}
	/* Retrain only when the existing link no longer passes EQ. */
	if (!cdn_dp_check_link_status(dp)) {
		ret = cdn_dp_train_link(dp);
		if (ret) {
			DRM_DEV_ERROR(dp->dev, "Failed link train %d\n", ret);
			goto out;
		}
	}

	/* Idle the stream while reprogramming video timing. */
	ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_IDLE);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to idle video %d\n", ret);
		goto out;
	}

	ret = cdn_dp_config_video(dp);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to config video %d\n", ret);
		goto out;
	}

	ret = cdn_dp_set_video_status(dp, CONTROL_VIDEO_VALID);
	if (ret) {
		DRM_DEV_ERROR(dp->dev, "Failed to valid video %d\n", ret);
		goto out;
	}
out:
	mutex_unlock(&dp->lock);
}
645
/*
 * DRM .disable hook: power the controller down if it is active, then let
 * the event worker re-evaluate the ports.
 */
static void cdn_dp_encoder_disable(struct drm_encoder *encoder)
{
	struct cdn_dp_device *dp = encoder_to_dp(encoder);
	int ret;

	mutex_lock(&dp->lock);
	if (dp->active) {
		ret = cdn_dp_disable(dp);
		if (ret) {
			DRM_DEV_ERROR(dp->dev, "Failed to disable encoder %d\n",
				      ret);
		}
	}
	mutex_unlock(&dp->lock);

	/*
	 * If a cable is still present on some port while the device is
	 * marked disconnected (e.g. one of several sinks was removed, or a
	 * retrain/reconfigure failed and disabled the DP), kick the event
	 * worker so it can bring the remaining connection back up.
	 */
	if (!dp->connected && cdn_dp_connected_port(dp))
		schedule_work(&dp->event_work);
}
673
674static int cdn_dp_encoder_atomic_check(struct drm_encoder *encoder,
675 struct drm_crtc_state *crtc_state,
676 struct drm_connector_state *conn_state)
677{
678 struct rockchip_crtc_state *s = to_rockchip_crtc_state(crtc_state);
679
680 s->output_mode = ROCKCHIP_OUT_MODE_AAAA;
681 s->output_type = DRM_MODE_CONNECTOR_DisplayPort;
682
683 return 0;
684}
685
/* Encoder helper ops wired to the hooks above. */
static const struct drm_encoder_helper_funcs cdn_dp_encoder_helper_funcs = {
	.mode_set = cdn_dp_encoder_mode_set,
	.enable = cdn_dp_encoder_enable,
	.disable = cdn_dp_encoder_disable,
	.atomic_check = cdn_dp_encoder_atomic_check,
};
692
/* Encoder core ops; only cleanup is needed. */
static const struct drm_encoder_funcs cdn_dp_encoder_funcs = {
	.destroy = drm_encoder_cleanup,
};
696
/*
 * Resolve all device-tree resources: GRF regmap, MMIO registers, the
 * four clocks and four reset controls. All lookups use devm_* so nothing
 * needs explicit unwinding on failure. Returns 0 or a negative errno.
 */
static int cdn_dp_parse_dt(struct cdn_dp_device *dp)
{
	struct device *dev = dp->dev;
	struct device_node *np = dev->of_node;
	struct platform_device *pdev = to_platform_device(dev);
	struct resource *res;

	dp->grf = syscon_regmap_lookup_by_phandle(np, "rockchip,grf");
	if (IS_ERR(dp->grf)) {
		DRM_DEV_ERROR(dev, "cdn-dp needs rockchip,grf property\n");
		return PTR_ERR(dp->grf);
	}

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	dp->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(dp->regs)) {
		DRM_DEV_ERROR(dev, "ioremap reg failed\n");
		return PTR_ERR(dp->regs);
	}

	dp->core_clk = devm_clk_get(dev, "core-clk");
	if (IS_ERR(dp->core_clk)) {
		DRM_DEV_ERROR(dev, "cannot get core_clk_dp\n");
		return PTR_ERR(dp->core_clk);
	}

	dp->pclk = devm_clk_get(dev, "pclk");
	if (IS_ERR(dp->pclk)) {
		DRM_DEV_ERROR(dev, "cannot get pclk\n");
		return PTR_ERR(dp->pclk);
	}

	dp->spdif_clk = devm_clk_get(dev, "spdif");
	if (IS_ERR(dp->spdif_clk)) {
		DRM_DEV_ERROR(dev, "cannot get spdif_clk\n");
		return PTR_ERR(dp->spdif_clk);
	}

	/* GRF clock is only enabled around GRF writes, see cdn_dp_grf_write(). */
	dp->grf_clk = devm_clk_get(dev, "grf");
	if (IS_ERR(dp->grf_clk)) {
		DRM_DEV_ERROR(dev, "cannot get grf clk\n");
		return PTR_ERR(dp->grf_clk);
	}

	dp->spdif_rst = devm_reset_control_get(dev, "spdif");
	if (IS_ERR(dp->spdif_rst)) {
		DRM_DEV_ERROR(dev, "no spdif reset control found\n");
		return PTR_ERR(dp->spdif_rst);
	}

	dp->dptx_rst = devm_reset_control_get(dev, "dptx");
	if (IS_ERR(dp->dptx_rst)) {
		DRM_DEV_ERROR(dev, "no uphy reset control found\n");
		return PTR_ERR(dp->dptx_rst);
	}

	dp->core_rst = devm_reset_control_get(dev, "core");
	if (IS_ERR(dp->core_rst)) {
		DRM_DEV_ERROR(dev, "no core reset control found\n");
		return PTR_ERR(dp->core_rst);
	}

	dp->apb_rst = devm_reset_control_get(dev, "apb");
	if (IS_ERR(dp->apb_rst)) {
		DRM_DEV_ERROR(dev, "no apb reset control found\n");
		return PTR_ERR(dp->apb_rst);
	}

	return 0;
}
767
/*
 * hdmi-codec .hw_params callback: translate the codec DAI format and
 * parameters into a cdn-dp audio config and apply it. Fails with -ENODEV
 * while the controller is down and -EINVAL for unsupported formats.
 */
static int cdn_dp_audio_hw_params(struct device *dev, void *data,
				  struct hdmi_codec_daifmt *daifmt,
				  struct hdmi_codec_params *params)
{
	struct cdn_dp_device *dp = dev_get_drvdata(dev);
	struct audio_info audio = {
		.sample_width = params->sample_width,
		.sample_rate = params->sample_rate,
		.channels = params->channels,
	};
	int ret;

	mutex_lock(&dp->lock);
	if (!dp->active) {
		ret = -ENODEV;
		goto out;
	}

	switch (daifmt->fmt) {
	case HDMI_I2S:
		audio.format = AFMT_I2S;
		break;
	case HDMI_SPDIF:
		audio.format = AFMT_SPDIF;
		break;
	default:
		DRM_DEV_ERROR(dev, "Invalid format %d\n", daifmt->fmt);
		ret = -EINVAL;
		goto out;
	}

	ret = cdn_dp_audio_config(dp, &audio);
	if (!ret)
		/* Remember the active config so shutdown can stop it. */
		dp->audio_info = audio;

out:
	mutex_unlock(&dp->lock);
	return ret;
}
807
808static void cdn_dp_audio_shutdown(struct device *dev, void *data)
809{
810 struct cdn_dp_device *dp = dev_get_drvdata(dev);
811 int ret;
812
813 mutex_lock(&dp->lock);
814 if (!dp->active)
815 goto out;
816
817 ret = cdn_dp_audio_stop(dp, &dp->audio_info);
818 if (!ret)
819 dp->audio_info.format = AFMT_UNUSED;
820out:
821 mutex_unlock(&dp->lock);
822}
823
824static int cdn_dp_audio_digital_mute(struct device *dev, void *data,
825 bool enable)
826{
827 struct cdn_dp_device *dp = dev_get_drvdata(dev);
828 int ret;
829
830 mutex_lock(&dp->lock);
831 if (!dp->active) {
832 ret = -ENODEV;
833 goto out;
834 }
835
836 ret = cdn_dp_audio_mute(dp, enable);
837
838out:
839 mutex_unlock(&dp->lock);
840 return ret;
841}
842
843static int cdn_dp_audio_get_eld(struct device *dev, void *data,
844 u8 *buf, size_t len)
845{
846 struct cdn_dp_device *dp = dev_get_drvdata(dev);
847
848 memcpy(buf, dp->connector.eld, min(sizeof(dp->connector.eld), len));
849
850 return 0;
851}
852
/* Operations exported to the hdmi-codec child device. */
static const struct hdmi_codec_ops audio_codec_ops = {
	.hw_params = cdn_dp_audio_hw_params,
	.audio_shutdown = cdn_dp_audio_shutdown,
	.digital_mute = cdn_dp_audio_digital_mute,
	.get_eld = cdn_dp_audio_get_eld,
};
859
860static int cdn_dp_audio_codec_init(struct cdn_dp_device *dp,
861 struct device *dev)
862{
863 struct hdmi_codec_pdata codec_data = {
864 .i2s = 1,
865 .spdif = 1,
866 .ops = &audio_codec_ops,
867 .max_i2s_channels = 8,
868 };
869
870 dp->audio_pdev = platform_device_register_data(
871 dev, HDMI_CODEC_DRV_NAME, PLATFORM_DEVID_AUTO,
872 &codec_data, sizeof(codec_data));
873
874 return PTR_ERR_OR_ZERO(dp->audio_pdev);
875}
876
/*
 * Request the controller firmware blob, retrying with exponential backoff
 * for up to CDN_FW_TIMEOUT_MS while it is not yet available (e.g. rootfs
 * not mounted at boot). Must be entered with dp->lock held; the lock is
 * dropped during the potentially long wait and re-taken before returning.
 */
static int cdn_dp_request_firmware(struct cdn_dp_device *dp)
{
	int ret;
	unsigned long timeout = jiffies + msecs_to_jiffies(CDN_FW_TIMEOUT_MS);
	unsigned long sleep = 1000;

	WARN_ON(!mutex_is_locked(&dp->lock));

	if (dp->fw_loaded)
		return 0;

	/* Don't hold the lock across msleep()/request_firmware(). */
	mutex_unlock(&dp->lock);

	while (time_before(jiffies, timeout)) {
		ret = request_firmware(&dp->fw, CDN_DP_FIRMWARE, dp->dev);
		/* -ENOENT: blob not there yet — back off and retry. */
		if (ret == -ENOENT) {
			msleep(sleep);
			sleep *= 2;
			continue;
		} else if (ret) {
			DRM_DEV_ERROR(dp->dev,
				      "failed to request firmware: %d\n", ret);
			goto out;
		}

		dp->fw_loaded = true;
		ret = 0;
		goto out;
	}

	DRM_DEV_ERROR(dp->dev, "Timed out trying to load firmware\n");
	ret = -ETIMEDOUT;
out:
	mutex_lock(&dp->lock);
	return ret;
}
914
/*
 * Hot-plug/Type-C event worker: loads firmware on first use, reconciles
 * dp->connected/dp->active with the actual cable and sink state, retrains
 * the link when needed, and fires a DRM hotplug event on status change.
 */
static void cdn_dp_pd_event_work(struct work_struct *work)
{
	struct cdn_dp_device *dp = container_of(work, struct cdn_dp_device,
						event_work);
	struct drm_connector *connector = &dp->connector;
	enum drm_connector_status old_status;

	int ret;

	mutex_lock(&dp->lock);

	/* Suspend path disabled everything; resume reschedules us. */
	if (dp->suspended)
		goto out;

	ret = cdn_dp_request_firmware(dp);
	if (ret)
		goto out;

	dp->connected = true;

	/* Not connected: nothing to drive. */
	if (!cdn_dp_connected_port(dp)) {
		DRM_DEV_INFO(dp->dev, "Not connected. Disabling cdn\n");
		dp->connected = false;

	/* Connected but controller down: bring it up. */
	} else if (!dp->active) {
		DRM_DEV_INFO(dp->dev, "Connected, not enabled. Enabling cdn\n");
		ret = cdn_dp_enable(dp);
		if (ret) {
			DRM_DEV_ERROR(dp->dev, "Enable dp failed %d\n", ret);
			dp->connected = false;
		}

	/* Cable present but the sink stopped responding. */
	} else if (!cdn_dp_check_sink_connection(dp)) {
		DRM_DEV_INFO(dp->dev, "Connected without sink. Assert hpd\n");
		dp->connected = false;

	/* Link dropped out of spec: retrain and maybe reconfigure video. */
	} else if (!cdn_dp_check_link_status(dp)) {
		unsigned int rate = dp->link.rate;
		unsigned int lanes = dp->link.num_lanes;
		struct drm_display_mode *mode = &dp->mode;

		DRM_DEV_INFO(dp->dev, "Connected with sink. Re-train link\n");
		ret = cdn_dp_train_link(dp);
		if (ret) {
			dp->connected = false;
			DRM_DEV_ERROR(dp->dev, "Train link failed %d\n", ret);
			goto out;
		}

		/* Video only needs reprogramming if the link changed. */
		if (mode->clock &&
		    (rate != dp->link.rate || lanes != dp->link.num_lanes)) {
			ret = cdn_dp_config_video(dp);
			if (ret) {
				dp->connected = false;
				DRM_DEV_ERROR(dp->dev,
					      "Failed to config video %d\n",
					      ret);
			}
		}
	}

out:
	mutex_unlock(&dp->lock);

	/* Notify userspace only when the detect status actually changed. */
	old_status = connector->status;
	connector->status = connector->funcs->detect(connector, false);
	if (old_status != connector->status)
		drm_kms_helper_hotplug_event(dp->drm_dev);
}
989
/*
 * Extcon notifier: runs in notifier context, so defer all real work
 * (firmware load, sleeping mutex, DPCD traffic) to the event worker.
 */
static int cdn_dp_pd_event(struct notifier_block *nb,
			   unsigned long event, void *priv)
{
	struct cdn_dp_port *port = container_of(nb, struct cdn_dp_port,
						event_nb);
	struct cdn_dp_device *dp = port->dp;

	schedule_work(&dp->event_work);

	return NOTIFY_DONE;
}
1006
/*
 * Component bind: parse DT resources, create the DRM encoder/connector
 * pair, register an extcon notifier per port, enable runtime PM and kick
 * the event worker for the initial detection pass.
 */
static int cdn_dp_bind(struct device *dev, struct device *master, void *data)
{
	struct cdn_dp_device *dp = dev_get_drvdata(dev);
	struct drm_encoder *encoder;
	struct drm_connector *connector;
	struct cdn_dp_port *port;
	struct drm_device *drm_dev = data;
	int ret, i;

	ret = cdn_dp_parse_dt(dp);
	if (ret < 0)
		return ret;

	/* Start fully powered down; the worker brings things up on demand. */
	dp->drm_dev = drm_dev;
	dp->connected = false;
	dp->active = false;
	dp->active_port = -1;
	dp->fw_loaded = false;

	INIT_WORK(&dp->event_work, cdn_dp_pd_event_work);

	encoder = &dp->encoder;

	encoder->possible_crtcs = drm_of_find_possible_crtcs(drm_dev,
							     dev->of_node);
	DRM_DEBUG_KMS("possible_crtcs = 0x%x\n", encoder->possible_crtcs);

	ret = drm_encoder_init(drm_dev, encoder, &cdn_dp_encoder_funcs,
			       DRM_MODE_ENCODER_TMDS, NULL);
	if (ret) {
		DRM_ERROR("failed to initialize encoder with drm\n");
		return ret;
	}

	drm_encoder_helper_add(encoder, &cdn_dp_encoder_helper_funcs);

	connector = &dp->connector;
	/* Hot-plug is driven by extcon events, not polling. */
	connector->polled = DRM_CONNECTOR_POLL_HPD;
	connector->dpms = DRM_MODE_DPMS_OFF;

	ret = drm_connector_init(drm_dev, connector,
				 &cdn_dp_atomic_connector_funcs,
				 DRM_MODE_CONNECTOR_DisplayPort);
	if (ret) {
		DRM_ERROR("failed to initialize connector with drm\n");
		goto err_free_encoder;
	}

	drm_connector_helper_add(connector, &cdn_dp_connector_helper_funcs);

	ret = drm_connector_attach_encoder(connector, encoder);
	if (ret) {
		DRM_ERROR("failed to attach connector and encoder\n");
		goto err_free_connector;
	}

	/* One extcon notifier per Type-C port feeds the shared worker. */
	for (i = 0; i < dp->ports; i++) {
		port = dp->port[i];

		port->event_nb.notifier_call = cdn_dp_pd_event;
		ret = devm_extcon_register_notifier(dp->dev, port->extcon,
						    EXTCON_DISP_DP,
						    &port->event_nb);
		if (ret) {
			DRM_DEV_ERROR(dev,
				      "register EXTCON_DISP_DP notifier err\n");
			goto err_free_connector;
		}
	}

	pm_runtime_enable(dev);

	/* Run an initial detection in case a cable is already plugged. */
	schedule_work(&dp->event_work);

	return 0;

err_free_connector:
	drm_connector_cleanup(connector);
err_free_encoder:
	drm_encoder_cleanup(encoder);
	return ret;
}
1089
/*
 * Component unbind: stop the worker, power the encoder down, destroy DRM
 * objects and release firmware/EDID resources.
 */
static void cdn_dp_unbind(struct device *dev, struct device *master, void *data)
{
	struct cdn_dp_device *dp = dev_get_drvdata(dev);
	struct drm_encoder *encoder = &dp->encoder;
	struct drm_connector *connector = &dp->connector;

	/* No more events may run once teardown starts. */
	cancel_work_sync(&dp->event_work);
	cdn_dp_encoder_disable(encoder);
	encoder->funcs->destroy(encoder);
	connector->funcs->destroy(connector);

	pm_runtime_disable(dev);
	if (dp->fw_loaded)
		release_firmware(dp->fw);
	kfree(dp->edid);
	dp->edid = NULL;
}
1107
/* Component framework glue (bound by the Rockchip DRM master driver). */
static const struct component_ops cdn_dp_component_ops = {
	.bind = cdn_dp_bind,
	.unbind = cdn_dp_unbind,
};
1112
1113int cdn_dp_suspend(struct device *dev)
1114{
1115 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1116 int ret = 0;
1117
1118 mutex_lock(&dp->lock);
1119 if (dp->active)
1120 ret = cdn_dp_disable(dp);
1121 dp->suspended = true;
1122 mutex_unlock(&dp->lock);
1123
1124 return ret;
1125}
1126
1127int cdn_dp_resume(struct device *dev)
1128{
1129 struct cdn_dp_device *dp = dev_get_drvdata(dev);
1130
1131 mutex_lock(&dp->lock);
1132 dp->suspended = false;
1133 if (dp->fw_loaded)
1134 schedule_work(&dp->event_work);
1135 mutex_unlock(&dp->lock);
1136
1137 return 0;
1138}
1139
/*
 * Platform probe: allocate the device, discover up to max_phy extcon/PHY
 * pairs (skipping absent ones, deferring when either is not ready yet),
 * register the audio codec child and join the DRM component master.
 */
static int cdn_dp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	const struct of_device_id *match;
	struct cdn_dp_data *dp_data;
	struct cdn_dp_port *port;
	struct cdn_dp_device *dp;
	struct extcon_dev *extcon;
	struct phy *phy;
	int i;

	dp = devm_kzalloc(dev, sizeof(*dp), GFP_KERNEL);
	if (!dp)
		return -ENOMEM;
	dp->dev = dev;

	match = of_match_node(cdn_dp_dt_ids, pdev->dev.of_node);
	dp_data = (struct cdn_dp_data *)match->data;

	for (i = 0; i < dp_data->max_phy; i++) {
		extcon = extcon_get_edev_by_phandle(dev, i);
		phy = devm_of_phy_get_by_index(dev, dev->of_node, i);

		/* Either provider not ready yet: retry the whole probe. */
		if (PTR_ERR(extcon) == -EPROBE_DEFER ||
		    PTR_ERR(phy) == -EPROBE_DEFER)
			return -EPROBE_DEFER;

		/* A port may legitimately be absent; just skip it. */
		if (IS_ERR(extcon) || IS_ERR(phy))
			continue;

		port = devm_kzalloc(dev, sizeof(*port), GFP_KERNEL);
		if (!port)
			return -ENOMEM;

		port->extcon = extcon;
		port->phy = phy;
		port->dp = dp;
		port->id = i;
		dp->port[dp->ports++] = port;
	}

	if (!dp->ports) {
		DRM_DEV_ERROR(dev, "missing extcon or phy\n");
		return -EINVAL;
	}

	mutex_init(&dp->lock);
	dev_set_drvdata(dev, dp);

	/*
	 * NOTE(review): the return value of cdn_dp_audio_codec_init() is
	 * ignored, and a component_add() failure leaves the audio pdev
	 * registered — confirm whether that cleanup gap matters here.
	 */
	cdn_dp_audio_codec_init(dp, dev);

	return component_add(dev, &cdn_dp_component_ops);
}
1193
/*
 * Platform remove: unregister the audio codec child, power the controller
 * down, then detach from the component master (which triggers unbind).
 */
static int cdn_dp_remove(struct platform_device *pdev)
{
	struct cdn_dp_device *dp = platform_get_drvdata(pdev);

	platform_device_unregister(dp->audio_pdev);
	cdn_dp_suspend(dp->dev);
	component_del(&pdev->dev, &cdn_dp_component_ops);

	return 0;
}
1204
1205static void cdn_dp_shutdown(struct platform_device *pdev)
1206{
1207 struct cdn_dp_device *dp = platform_get_drvdata(pdev);
1208
1209 cdn_dp_suspend(dp->dev);
1210}
1211
/* System sleep hooks (suspend/resume are also called from remove/shutdown). */
static const struct dev_pm_ops cdn_dp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(cdn_dp_suspend,
				cdn_dp_resume)
};
1216
/* Platform driver definition, registered by the Rockchip DRM core. */
struct platform_driver cdn_dp_driver = {
	.probe = cdn_dp_probe,
	.remove = cdn_dp_remove,
	.shutdown = cdn_dp_shutdown,
	.driver = {
		   .name = "cdn-dp",
		   .owner = THIS_MODULE,
		   .of_match_table = of_match_ptr(cdn_dp_dt_ids),
		   .pm = &cdn_dp_pm_ops,
	},
};
1228