#include <drm/drmP.h>
#include "amdgpu.h"
#include "amdgpu_pm.h"
#include "amdgpu_i2c.h"
#include "cikd.h"
#include "atom.h"
#include "amdgpu_atombios.h"
#include "atombios_crtc.h"
#include "atombios_encoders.h"
#include "amdgpu_pll.h"
#include "amdgpu_connectors.h"
#include "dce_v8_0.h"

#include "dce/dce_8_0_d.h"
#include "dce/dce_8_0_sh_mask.h"

#include "gca/gfx_7_2_enum.h"

#include "gmc/gmc_7_1_d.h"
#include "gmc/gmc_7_1_sh_mask.h"

#include "oss/oss_2_0_d.h"
#include "oss/oss_2_0_sh_mask.h"

static void dce_v8_0_set_display_funcs(struct amdgpu_device *adev);
static void dce_v8_0_set_irq_funcs(struct amdgpu_device *adev);

static const u32 crtc_offsets[6] =
{
	CRTC0_REGISTER_OFFSET,
	CRTC1_REGISTER_OFFSET,
	CRTC2_REGISTER_OFFSET,
	CRTC3_REGISTER_OFFSET,
	CRTC4_REGISTER_OFFSET,
	CRTC5_REGISTER_OFFSET
};

static const u32 hpd_offsets[] =
{
	HPD0_REGISTER_OFFSET,
	HPD1_REGISTER_OFFSET,
	HPD2_REGISTER_OFFSET,
	HPD3_REGISTER_OFFSET,
	HPD4_REGISTER_OFFSET,
	HPD5_REGISTER_OFFSET
};

static const uint32_t dig_offsets[] = {
	CRTC0_REGISTER_OFFSET,
	CRTC1_REGISTER_OFFSET,
	CRTC2_REGISTER_OFFSET,
	CRTC3_REGISTER_OFFSET,
	CRTC4_REGISTER_OFFSET,
	CRTC5_REGISTER_OFFSET,
	(0x13830 - 0x7030) >> 2,
};

static const struct {
	uint32_t reg;
	uint32_t vblank;
	uint32_t vline;
	uint32_t hpd;
} interrupt_status_offsets[6] = { {
	.reg = mmDISP_INTERRUPT_STATUS,
	.vblank = DISP_INTERRUPT_STATUS__LB_D1_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS__LB_D1_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS__DC_HPD1_INTERRUPT_MASK
}, {
	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE,
	.vblank = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS_CONTINUE__LB_D2_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS_CONTINUE__DC_HPD2_INTERRUPT_MASK
}, {
	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE2,
	.vblank = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS_CONTINUE2__LB_D3_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS_CONTINUE2__DC_HPD3_INTERRUPT_MASK
}, {
	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE3,
	.vblank = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS_CONTINUE3__LB_D4_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS_CONTINUE3__DC_HPD4_INTERRUPT_MASK
}, {
	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE4,
	.vblank = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS_CONTINUE4__LB_D5_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS_CONTINUE4__DC_HPD5_INTERRUPT_MASK
}, {
	.reg = mmDISP_INTERRUPT_STATUS_CONTINUE5,
	.vblank = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VBLANK_INTERRUPT_MASK,
	.vline = DISP_INTERRUPT_STATUS_CONTINUE5__LB_D6_VLINE_INTERRUPT_MASK,
	.hpd = DISP_INTERRUPT_STATUS_CONTINUE5__DC_HPD6_INTERRUPT_MASK
} };

static u32 dce_v8_0_audio_endpt_rreg(struct amdgpu_device *adev,
				     u32 block_offset, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags);
	WREG32(mmAZALIA_F0_CODEC_ENDPOINT_INDEX + block_offset, reg);
	r = RREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset);
	spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags);

	return r;
}

static void dce_v8_0_audio_endpt_wreg(struct amdgpu_device *adev,
				      u32 block_offset, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->audio_endpt_idx_lock, flags);
	WREG32(mmAZALIA_F0_CODEC_ENDPOINT_INDEX + block_offset, reg);
	WREG32(mmAZALIA_F0_CODEC_ENDPOINT_DATA + block_offset, v);
	spin_unlock_irqrestore(&adev->audio_endpt_idx_lock, flags);
}

static u32 dce_v8_0_vblank_get_counter(struct amdgpu_device *adev, int crtc)
{
	if (crtc >= adev->mode_info.num_crtc)
		return 0;
	else
		return RREG32(mmCRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
}

static void dce_v8_0_pageflip_interrupt_init(struct amdgpu_device *adev)
{
	unsigned i;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		amdgpu_irq_get(adev, &adev->pageflip_irq, i);
}

static void dce_v8_0_pageflip_interrupt_fini(struct amdgpu_device *adev)
{
	unsigned i;

	for (i = 0; i < adev->mode_info.num_crtc; i++)
		amdgpu_irq_put(adev, &adev->pageflip_irq, i);
}
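
/**
 * dce_v8_0_page_flip - pageflip callback.
 *
 * @adev: amdgpu_device pointer
 * @crtc_id: crtc to flip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: asynchronous flip
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address, then posts the write with a read back.
 */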
static void dce_v8_0_page_flip(struct amdgpu_device *adev,
			       int crtc_id, u64 crtc_base, bool async)
{
	struct amdgpu_crtc *amdgpu_crtc = adev->mode_info.crtcs[crtc_id];

	WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, async ?
	       GRPH_FLIP_CONTROL__GRPH_SURFACE_UPDATE_H_RETRACE_EN_MASK : 0);

	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
	       upper_32_bits(crtc_base));

	WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
	       lower_32_bits(crtc_base));

	RREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset);
}

static int dce_v8_0_crtc_get_scanoutpos(struct amdgpu_device *adev, int crtc,
					u32 *vbl, u32 *position)
{
	if ((crtc < 0) || (crtc >= adev->mode_info.num_crtc))
		return -EINVAL;

	*vbl = RREG32(mmCRTC_V_BLANK_START_END + crtc_offsets[crtc]);
	*position = RREG32(mmCRTC_STATUS_POSITION + crtc_offsets[crtc]);

	return 0;
}
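
/**
 * dce_v8_0_hpd_sense - hpd sense callback.
 *
 * @adev: amdgpu_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Checks if a digital monitor is connected on the given hpd pin.
 * Returns true if connected, false if not connected.
 */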
static bool dce_v8_0_hpd_sense(struct amdgpu_device *adev,
			       enum amdgpu_hpd_id hpd)
{
	bool connected = false;

	if (hpd >= adev->mode_info.num_hpd)
		return connected;

	if (RREG32(mmDC_HPD1_INT_STATUS + hpd_offsets[hpd]) &
	    DC_HPD1_INT_STATUS__DC_HPD1_SENSE_MASK)
		connected = true;

	return connected;
}
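
/**
 * dce_v8_0_hpd_set_polarity - hpd set polarity callback.
 *
 * @adev: amdgpu_device pointer
 * @hpd: hpd (hotplug detect) pin
 *
 * Set the interrupt polarity of the hpd pin based on the
 * current connection state.
 */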
static void dce_v8_0_hpd_set_polarity(struct amdgpu_device *adev,
				      enum amdgpu_hpd_id hpd)
{
	u32 tmp;
	bool connected = dce_v8_0_hpd_sense(adev, hpd);

	if (hpd >= adev->mode_info.num_hpd)
		return;

	tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
	if (connected)
		tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;
	else
		tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_POLARITY_MASK;
	WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
}
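
/**
 * dce_v8_0_hpd_init - init hpd state
 *
 * @adev: amdgpu_device pointer
 *
 * Setup the hpd pins used by the card: enable each pin,
 * set the polarity, and enable the hpd interrupts.
 */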
static void dce_v8_0_hpd_init(struct amdgpu_device *adev)
{
	struct drm_device *dev = adev->ddev;
	struct drm_connector *connector;
	u32 tmp;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

		if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
			continue;

		tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
		tmp |= DC_HPD1_CONTROL__DC_HPD1_EN_MASK;
		WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
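			/* For eDP/LVDS the pin stays enabled for sensing, but the
			 * hpd interrupt is kept disabled (it can disturb the DP aux
			 * channel on some panels and cause interrupt storms during
			 * dpms).
			 */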
			tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
			tmp &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
			WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);
			continue;
		}

		dce_v8_0_hpd_set_polarity(adev, amdgpu_connector->hpd.hpd);
		amdgpu_irq_get(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd);
	}
}
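
/**
 * dce_v8_0_hpd_fini - hpd tear down callback.
 *
 * @adev: amdgpu_device pointer
 *
 * Tear down the hpd pins used by the card: disable each pin
 * and release the hpd interrupts.
 */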
static void dce_v8_0_hpd_fini(struct amdgpu_device *adev)
{
	struct drm_device *dev = adev->ddev;
	struct drm_connector *connector;
	u32 tmp;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

		if (amdgpu_connector->hpd.hpd >= adev->mode_info.num_hpd)
			continue;

		tmp = RREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd]);
		tmp &= ~DC_HPD1_CONTROL__DC_HPD1_EN_MASK;
		WREG32(mmDC_HPD1_CONTROL + hpd_offsets[amdgpu_connector->hpd.hpd], tmp);

		amdgpu_irq_put(adev, &adev->hpd_irq, amdgpu_connector->hpd.hpd);
	}
}

static u32 dce_v8_0_hpd_get_gpio_reg(struct amdgpu_device *adev)
{
	return mmDC_GPIO_HPD_A;
}

static bool dce_v8_0_is_display_hung(struct amdgpu_device *adev)
{
	u32 crtc_hung = 0;
	u32 crtc_status[6];
	u32 i, j, tmp;

	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		if (RREG32(mmCRTC_CONTROL + crtc_offsets[i]) & CRTC_CONTROL__CRTC_MASTER_EN_MASK) {
			crtc_status[i] = RREG32(mmCRTC_STATUS_HV_COUNT + crtc_offsets[i]);
			crtc_hung |= (1 << i);
		}
	}

	for (j = 0; j < 10; j++) {
		for (i = 0; i < adev->mode_info.num_crtc; i++) {
			if (crtc_hung & (1 << i)) {
				tmp = RREG32(mmCRTC_STATUS_HV_COUNT + crtc_offsets[i]);
				if (tmp != crtc_status[i])
					crtc_hung &= ~(1 << i);
			}
		}
		if (crtc_hung == 0)
			return false;
		udelay(100);
	}

	return true;
}

static void dce_v8_0_set_vga_render_state(struct amdgpu_device *adev,
					  bool render)
{
	u32 tmp;

	tmp = RREG32(mmVGA_HDP_CONTROL);
	if (render)
		tmp = REG_SET_FIELD(tmp, VGA_HDP_CONTROL, VGA_MEMORY_DISABLE, 0);
	else
		tmp = REG_SET_FIELD(tmp, VGA_HDP_CONTROL, VGA_MEMORY_DISABLE, 1);
	WREG32(mmVGA_HDP_CONTROL, tmp);

	tmp = RREG32(mmVGA_RENDER_CONTROL);
	if (render)
		tmp = REG_SET_FIELD(tmp, VGA_RENDER_CONTROL, VGA_VSTATUS_CNTL, 1);
	else
		tmp = REG_SET_FIELD(tmp, VGA_RENDER_CONTROL, VGA_VSTATUS_CNTL, 0);
	WREG32(mmVGA_RENDER_CONTROL, tmp);
}

static int dce_v8_0_get_num_crtc(struct amdgpu_device *adev)
{
	int num_crtc = 0;

	switch (adev->asic_type) {
	case CHIP_BONAIRE:
	case CHIP_HAWAII:
		num_crtc = 6;
		break;
	case CHIP_KAVERI:
		num_crtc = 4;
		break;
	case CHIP_KABINI:
	case CHIP_MULLINS:
		num_crtc = 2;
		break;
	default:
		num_crtc = 0;
	}
	return num_crtc;
}

void dce_v8_0_disable_dce(struct amdgpu_device *adev)
{
	if (amdgpu_atombios_has_dce_engine_info(adev)) {
		u32 tmp;
		int crtc_enabled, i;

		dce_v8_0_set_vga_render_state(adev, false);

		for (i = 0; i < dce_v8_0_get_num_crtc(adev); i++) {
			crtc_enabled = REG_GET_FIELD(RREG32(mmCRTC_CONTROL + crtc_offsets[i]),
						     CRTC_CONTROL, CRTC_MASTER_EN);
			if (crtc_enabled) {
				WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				tmp = RREG32(mmCRTC_CONTROL + crtc_offsets[i]);
				tmp = REG_SET_FIELD(tmp, CRTC_CONTROL, CRTC_MASTER_EN, 0);
				WREG32(mmCRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(mmCRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
		}
	}
}

static void dce_v8_0_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum amdgpu_connector_dither dither = AMDGPU_FMT_DITHER_DISABLE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		bpc = amdgpu_connector_get_monitor_bpc(connector);
		dither = amdgpu_connector->dither;
	}

	if (amdgpu_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == AMDGPU_FMT_DITHER_ENABLE)
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_FRAME_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_HIGHPASS_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_EN_MASK |
				(0 << FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_DEPTH__SHIFT));
		else
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_EN_MASK |
				(0 << FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_DEPTH__SHIFT));
		break;
	case 8:
		if (dither == AMDGPU_FMT_DITHER_ENABLE)
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_FRAME_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_HIGHPASS_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_RGB_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_EN_MASK |
				(1 << FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_DEPTH__SHIFT));
		else
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_EN_MASK |
				(1 << FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_DEPTH__SHIFT));
		break;
	case 10:
		if (dither == AMDGPU_FMT_DITHER_ENABLE)
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_FRAME_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_HIGHPASS_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_RGB_RANDOM_ENABLE_MASK |
				FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_EN_MASK |
				(2 << FMT_BIT_DEPTH_CONTROL__FMT_SPATIAL_DITHER_DEPTH__SHIFT));
		else
			tmp |= (FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_EN_MASK |
				(2 << FMT_BIT_DEPTH_CONTROL__FMT_TRUNCATE_DEPTH__SHIFT));
		break;
	default:
		break;
	}

	WREG32(mmFMT_BIT_DEPTH_CONTROL + amdgpu_crtc->crtc_offset, tmp);
}
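
/**
 * dce_v8_0_line_buffer_adjust - Set up the line buffer
 *
 * @adev: amdgpu_device pointer
 * @amdgpu_crtc: the selected display controller
 * @mode: the current display mode on the selected display
 * controller
 *
 * Setup up the line buffer allocation for
 * the selected display controller.
 * Returns the line buffer size in pixels.
 */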
static u32 dce_v8_0_line_buffer_adjust(struct amdgpu_device *adev,
				       struct amdgpu_crtc *amdgpu_crtc,
				       struct drm_display_mode *mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = amdgpu_crtc->crtc_id * 0x8;

	if (amdgpu_crtc->base.enabled && mode) {
		if (mode->crtc_hdisplay < 1920) {
			tmp = 1;
			buffer_alloc = 2;
		} else if (mode->crtc_hdisplay < 2560) {
			tmp = 2;
			buffer_alloc = 2;
		} else if (mode->crtc_hdisplay < 4096) {
			tmp = 0;
			buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4;
		} else {
			DRM_DEBUG_KMS("Mode too big for LB!\n");
			tmp = 0;
			buffer_alloc = (adev->flags & AMD_IS_APU) ? 2 : 4;
		}
	} else {
		tmp = 1;
		buffer_alloc = 0;
	}

	WREG32(mmLB_MEMORY_CTRL + amdgpu_crtc->crtc_offset,
	       (tmp << LB_MEMORY_CTRL__LB_MEMORY_CONFIG__SHIFT) |
	       (0x6B0 << LB_MEMORY_CTRL__LB_MEMORY_SIZE__SHIFT));

	WREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
	       (buffer_alloc << PIPE0_DMIF_BUFFER_CONTROL__DMIF_BUFFERS_ALLOCATED__SHIFT));
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(mmPIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
		    PIPE0_DMIF_BUFFER_CONTROL__DMIF_BUFFERS_ALLOCATION_COMPLETED_MASK)
			break;
		udelay(1);
	}

	if (amdgpu_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		default:
			return 4096 * 2;
		case 1:
			return 1920 * 2;
		case 2:
			return 2560 * 2;
		}
	}

	return 0;
}
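
/**
 * cik_get_number_of_dram_channels - get the number of dram channels
 *
 * @adev: amdgpu_device pointer
 *
 * Look up the number of video ram channels.
 * Used for display watermark bandwidth calculations.
 * Returns the number of dram channels.
 */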
static u32 cik_get_number_of_dram_channels(struct amdgpu_device *adev)
{
	u32 tmp = RREG32(mmMC_SHARED_CHMAP);

	switch ((tmp & MC_SHARED_CHMAP__NOOFCHAN_MASK) >> MC_SHARED_CHMAP__NOOFCHAN__SHIFT) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	case 4:
		return 3;
	case 5:
		return 6;
	case 6:
		return 10;
	case 7:
		return 12;
	case 8:
		return 16;
	}
}

struct dce8_wm_params {
	u32 dram_channels;
	u32 yclk;
	u32 sclk;
	u32 disp_clk;
	u32 src_width;
	u32 active_time;
	u32 blank_time;
	bool interlaced;
	fixed20_12 vsc;
	u32 num_heads;
	u32 bytes_per_pixel;
	u32 lb_size;
	u32 vtaps;
};
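
/**
 * dce_v8_0_dram_bandwidth - get the dram bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the raw dram bandwidth.
 * Used for display watermark bandwidth calculations.
 * Returns the dram bandwidth in MBytes/s.
 */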
static u32 dce_v8_0_dram_bandwidth(struct dce8_wm_params *wm)
{
	fixed20_12 dram_efficiency;
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}
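
/**
 * dce_v8_0_dram_bandwidth_for_display - get the dram bandwidth for display
 *
 * @wm: watermark calculation data
 *
 * Calculate the dram bandwidth available for display.
 * Used for display watermark bandwidth calculations.
 * Returns the dram bandwidth for display in MBytes/s.
 */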
static u32 dce_v8_0_dram_bandwidth_for_display(struct dce8_wm_params *wm)
{
	fixed20_12 disp_dram_allocation;
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3);
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);

	return dfixed_trunc(bandwidth);
}
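
/**
 * dce_v8_0_data_return_bandwidth - get the data return bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the data return bandwidth used for display.
 * Used for display watermark bandwidth calculations.
 * Returns the data return bandwidth in MBytes/s.
 */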
static u32 dce_v8_0_data_return_bandwidth(struct dce8_wm_params *wm)
{
	fixed20_12 return_efficiency;
	fixed20_12 sclk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	sclk.full = dfixed_const(wm->sclk);
	sclk.full = dfixed_div(sclk, a);
	a.full = dfixed_const(10);
	return_efficiency.full = dfixed_const(8);
	return_efficiency.full = dfixed_div(return_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, sclk);
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);

	return dfixed_trunc(bandwidth);
}
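
/**
 * dce_v8_0_dmif_request_bandwidth - get the dmif bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the dmif bandwidth used for display.
 * Used for display watermark bandwidth calculations.
 * Returns the dmif bandwidth in MBytes/s.
 */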
static u32 dce_v8_0_dmif_request_bandwidth(struct dce8_wm_params *wm)
{
	fixed20_12 disp_clk_request_efficiency;
	fixed20_12 disp_clk, bandwidth;
	fixed20_12 a, b;

	a.full = dfixed_const(1000);
	disp_clk.full = dfixed_const(wm->disp_clk);
	disp_clk.full = dfixed_div(disp_clk, a);
	a.full = dfixed_const(32);
	b.full = dfixed_mul(a, disp_clk);

	a.full = dfixed_const(10);
	disp_clk_request_efficiency.full = dfixed_const(8);
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);

	bandwidth.full = dfixed_mul(b, disp_clk_request_efficiency);

	return dfixed_trunc(bandwidth);
}
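
/**
 * dce_v8_0_available_bandwidth - get the min available bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the min available bandwidth used for display: the
 * smallest of the dram, data return and dmif bandwidths.
 * Returns the min available bandwidth in MBytes/s.
 */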
static u32 dce_v8_0_available_bandwidth(struct dce8_wm_params *wm)
{
	u32 dram_bandwidth = dce_v8_0_dram_bandwidth(wm);
	u32 data_return_bandwidth = dce_v8_0_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = dce_v8_0_dmif_request_bandwidth(wm);

	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}
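
/**
 * dce_v8_0_average_bandwidth - get the average bandwidth
 *
 * @wm: watermark calculation data
 *
 * Calculate the average bandwidth consumed by the current display mode.
 * Used for display watermark bandwidth calculations.
 * Returns the average bandwidth in MBytes/s.
 */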
static u32 dce_v8_0_average_bandwidth(struct dce8_wm_params *wm)
{
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}
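
/**
 * dce_v8_0_latency_watermark - get the latency watermark
 *
 * @wm: watermark calculation data
 *
 * Calculate the latency watermark.
 * Used for display watermark bandwidth calculations.
 * Returns the latency watermark in ns.
 */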
static u32 dce_v8_0_latency_watermark(struct dce8_wm_params *wm)
{
	u32 mc_latency = 2000;
	u32 available_bandwidth = dce_v8_0_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk;
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	u32 tmp, dmif_size = 12288;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);
	tmp = div_u64((u64) dmif_size * (u64) wm->disp_clk, mc_latency + 512);
	tmp = min(dfixed_trunc(a), tmp);

	lb_fill_bw = min(tmp, wm->disp_clk * wm->bytes_per_pixel / 1000);

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);
}
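
/**
 * dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display - check
 * average and available dram bandwidth
 *
 * @wm: watermark calculation data
 *
 * Check if the display average bandwidth fits in the display
 * dram bandwidth.
 * Returns true if the display fits, false if not.
 */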
static bool dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display(struct dce8_wm_params *wm)
{
	if (dce_v8_0_average_bandwidth(wm) <=
	    (dce_v8_0_dram_bandwidth_for_display(wm) / wm->num_heads))
		return true;
	else
		return false;
}
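
/**
 * dce_v8_0_average_bandwidth_vs_available_bandwidth - check
 * average and available bandwidth
 *
 * @wm: watermark calculation data
 *
 * Check if the display average bandwidth fits in the display
 * available bandwidth.
 * Returns true if the display fits, false if not.
 */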
static bool dce_v8_0_average_bandwidth_vs_available_bandwidth(struct dce8_wm_params *wm)
{
	if (dce_v8_0_average_bandwidth(wm) <=
	    (dce_v8_0_available_bandwidth(wm) / wm->num_heads))
		return true;
	else
		return false;
}
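
/**
 * dce_v8_0_check_latency_hiding - check latency hiding
 *
 * @wm: watermark calculation data
 *
 * Check whether the latency watermark can be hidden by the
 * line buffer.
 * Returns true if latency hiding is ok, false if not.
 */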
static bool dce_v8_0_check_latency_hiding(struct dce8_wm_params *wm)
{
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (dce_v8_0_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}
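
/**
 * dce_v8_0_program_watermarks - program display watermarks
 *
 * @adev: amdgpu_device pointer
 * @amdgpu_crtc: the selected display controller
 * @lb_size: line buffer size
 * @num_heads: number of display controllers in use
 *
 * Calculate and program the display watermarks for the
 * selected display controller.
 */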
static void dce_v8_0_program_watermarks(struct amdgpu_device *adev,
					struct amdgpu_crtc *amdgpu_crtc,
					u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &amdgpu_crtc->base.mode;
	struct dce8_wm_params wm_low, wm_high;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 tmp, wm_mask, lb_vblank_lead_lines = 0;

	if (amdgpu_crtc->base.enabled && num_heads && mode) {
		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
					    (u32)mode->clock);
		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
					  (u32)mode->clock);
		line_time = min(line_time, (u32)65535);

		if (adev->pm.dpm_enabled) {
			wm_high.yclk =
				amdgpu_dpm_get_mclk(adev, false) * 10;
			wm_high.sclk =
				amdgpu_dpm_get_sclk(adev, false) * 10;
		} else {
			wm_high.yclk = adev->pm.current_mclk * 10;
			wm_high.sclk = adev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = amdgpu_crtc->vsc;
		wm_high.vtaps = 1;
		if (amdgpu_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4;
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = cik_get_number_of_dram_channels(adev);
		wm_high.num_heads = num_heads;

		latency_watermark_a = min(dce_v8_0_latency_watermark(&wm_high), (u32)65535);

		if (!dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !dce_v8_0_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !dce_v8_0_check_latency_hiding(&wm_high) ||
		    (adev->mode_info.disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
		}

		if (adev->pm.dpm_enabled) {
			wm_low.yclk =
				amdgpu_dpm_get_mclk(adev, true) * 10;
			wm_low.sclk =
				amdgpu_dpm_get_sclk(adev, true) * 10;
		} else {
			wm_low.yclk = adev->pm.current_mclk * 10;
			wm_low.sclk = adev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = amdgpu_crtc->vsc;
		wm_low.vtaps = 1;
		if (amdgpu_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4;
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = cik_get_number_of_dram_channels(adev);
		wm_low.num_heads = num_heads;

		latency_watermark_b = min(dce_v8_0_latency_watermark(&wm_low), (u32)65535);

		if (!dce_v8_0_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !dce_v8_0_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !dce_v8_0_check_latency_hiding(&wm_low) ||
		    (adev->mode_info.disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
		}
		lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	wm_mask = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset);
	tmp = wm_mask;
	tmp &= ~(3 << DPG_WATERMARK_MASK_CONTROL__URGENCY_WATERMARK_MASK__SHIFT);
	tmp |= (1 << DPG_WATERMARK_MASK_CONTROL__URGENCY_WATERMARK_MASK__SHIFT);
	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset,
	       ((latency_watermark_a << DPG_PIPE_URGENCY_CONTROL__URGENCY_LOW_WATERMARK__SHIFT) |
		(line_time << DPG_PIPE_URGENCY_CONTROL__URGENCY_HIGH_WATERMARK__SHIFT)));

	tmp = RREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset);
	tmp &= ~(3 << DPG_WATERMARK_MASK_CONTROL__URGENCY_WATERMARK_MASK__SHIFT);
	tmp |= (2 << DPG_WATERMARK_MASK_CONTROL__URGENCY_WATERMARK_MASK__SHIFT);
	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, tmp);
	WREG32(mmDPG_PIPE_URGENCY_CONTROL + amdgpu_crtc->crtc_offset,
	       ((latency_watermark_b << DPG_PIPE_URGENCY_CONTROL__URGENCY_LOW_WATERMARK__SHIFT) |
		(line_time << DPG_PIPE_URGENCY_CONTROL__URGENCY_HIGH_WATERMARK__SHIFT)));

	WREG32(mmDPG_WATERMARK_MASK_CONTROL + amdgpu_crtc->crtc_offset, wm_mask);

	amdgpu_crtc->line_time = line_time;
	amdgpu_crtc->wm_high = latency_watermark_a;
	amdgpu_crtc->wm_low = latency_watermark_b;

	amdgpu_crtc->lb_vblank_lead_lines = lb_vblank_lead_lines;
}
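
/**
 * dce_v8_0_bandwidth_update - program display watermarks
 *
 * @adev: amdgpu_device pointer
 *
 * Calculate and program the display watermarks and line
 * buffer allocation for all enabled display controllers.
 */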
static void dce_v8_0_bandwidth_update(struct amdgpu_device *adev)
{
	struct drm_display_mode *mode = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	amdgpu_display_update_priority(adev);

	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		if (adev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	for (i = 0; i < adev->mode_info.num_crtc; i++) {
		mode = &adev->mode_info.crtcs[i]->base.mode;
		lb_size = dce_v8_0_line_buffer_adjust(adev, adev->mode_info.crtcs[i], mode);
		dce_v8_0_program_watermarks(adev, adev->mode_info.crtcs[i],
					    lb_size, num_heads);
	}
}
1100
1101static void dce_v8_0_audio_get_connected_pins(struct amdgpu_device *adev)
1102{
1103 int i;
1104 u32 offset, tmp;
1105
1106 for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
1107 offset = adev->mode_info.audio.pin[i].offset;
1108 tmp = RREG32_AUDIO_ENDPT(offset,
1109 ixAZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT);
1110 if (((tmp &
1111 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT__PORT_CONNECTIVITY_MASK) >>
1112 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT__PORT_CONNECTIVITY__SHIFT) == 1)
1113 adev->mode_info.audio.pin[i].connected = false;
1114 else
1115 adev->mode_info.audio.pin[i].connected = true;
1116 }
1117}
1118
1119static struct amdgpu_audio_pin *dce_v8_0_audio_get_pin(struct amdgpu_device *adev)
1120{
1121 int i;
1122
1123 dce_v8_0_audio_get_connected_pins(adev);
1124
1125 for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
1126 if (adev->mode_info.audio.pin[i].connected)
1127 return &adev->mode_info.audio.pin[i];
1128 }
1129 DRM_ERROR("No connected audio pins found!\n");
1130 return NULL;
1131}
1132
1133static void dce_v8_0_afmt_audio_select_pin(struct drm_encoder *encoder)
1134{
1135 struct amdgpu_device *adev = encoder->dev->dev_private;
1136 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1137 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1138 u32 offset;
1139
1140 if (!dig || !dig->afmt || !dig->afmt->pin)
1141 return;
1142
1143 offset = dig->afmt->offset;
1144
1145 WREG32(mmAFMT_AUDIO_SRC_CONTROL + offset,
1146 (dig->afmt->pin->id << AFMT_AUDIO_SRC_CONTROL__AFMT_AUDIO_SRC_SELECT__SHIFT));
1147}
1148
1149static void dce_v8_0_audio_write_latency_fields(struct drm_encoder *encoder,
1150 struct drm_display_mode *mode)
1151{
1152 struct amdgpu_device *adev = encoder->dev->dev_private;
1153 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1154 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1155 struct drm_connector *connector;
1156 struct amdgpu_connector *amdgpu_connector = NULL;
1157 u32 tmp = 0, offset;
1158
1159 if (!dig || !dig->afmt || !dig->afmt->pin)
1160 return;
1161
1162 offset = dig->afmt->pin->offset;
1163
1164 list_for_each_entry(connector, &encoder->dev->mode_config.connector_list, head) {
1165 if (connector->encoder == encoder) {
1166 amdgpu_connector = to_amdgpu_connector(connector);
1167 break;
1168 }
1169 }
1170
1171 if (!amdgpu_connector) {
1172 DRM_ERROR("Couldn't find encoder's connector\n");
1173 return;
1174 }
1175
1176 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1177 if (connector->latency_present[1])
1178 tmp =
1179 (connector->video_latency[1] <<
1180 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__VIDEO_LIPSYNC__SHIFT) |
1181 (connector->audio_latency[1] <<
1182 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__AUDIO_LIPSYNC__SHIFT);
1183 else
1184 tmp =
1185 (0 <<
1186 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__VIDEO_LIPSYNC__SHIFT) |
1187 (0 <<
1188 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__AUDIO_LIPSYNC__SHIFT);
1189 } else {
1190 if (connector->latency_present[0])
1191 tmp =
1192 (connector->video_latency[0] <<
1193 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__VIDEO_LIPSYNC__SHIFT) |
1194 (connector->audio_latency[0] <<
1195 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__AUDIO_LIPSYNC__SHIFT);
1196 else
1197 tmp =
1198 (0 <<
1199 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__VIDEO_LIPSYNC__SHIFT) |
1200 (0 <<
1201 AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC__AUDIO_LIPSYNC__SHIFT);
1202
1203 }
1204 WREG32_AUDIO_ENDPT(offset, ixAZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_LIPSYNC, tmp);
1205}
1206
1207static void dce_v8_0_audio_write_speaker_allocation(struct drm_encoder *encoder)
1208{
1209 struct amdgpu_device *adev = encoder->dev->dev_private;
1210 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1211 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1212 struct drm_connector *connector;
1213 struct amdgpu_connector *amdgpu_connector = NULL;
1214 u32 offset, tmp;
1215 u8 *sadb = NULL;
1216 int sad_count;
1217
1218 if (!dig || !dig->afmt || !dig->afmt->pin)
1219 return;
1220
1221 offset = dig->afmt->pin->offset;
1222
1223 list_for_each_entry(connector, &encoder->dev->mode_config.connector_list, head) {
1224 if (connector->encoder == encoder) {
1225 amdgpu_connector = to_amdgpu_connector(connector);
1226 break;
1227 }
1228 }
1229
1230 if (!amdgpu_connector) {
1231 DRM_ERROR("Couldn't find encoder's connector\n");
1232 return;
1233 }
1234
1235 sad_count = drm_edid_to_speaker_allocation(amdgpu_connector_edid(connector), &sadb);
1236 if (sad_count < 0) {
1237 DRM_ERROR("Couldn't read Speaker Allocation Data Block: %d\n", sad_count);
1238 sad_count = 0;
1239 }
1240
1241
1242 tmp = RREG32_AUDIO_ENDPT(offset, ixAZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER);
1243 tmp &= ~(AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER__DP_CONNECTION_MASK |
1244 AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER__SPEAKER_ALLOCATION_MASK);
1245
1246 tmp |= AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER__HDMI_CONNECTION_MASK;
1247 if (sad_count)
1248 tmp |= (sadb[0] << AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER__SPEAKER_ALLOCATION__SHIFT);
1249 else
1250 tmp |= (5 << AZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER__SPEAKER_ALLOCATION__SHIFT);
1251 WREG32_AUDIO_ENDPT(offset, ixAZALIA_F0_CODEC_PIN_CONTROL_CHANNEL_SPEAKER, tmp);
1252
1253 kfree(sadb);
1254}
1255
1256static void dce_v8_0_audio_write_sad_regs(struct drm_encoder *encoder)
1257{
1258 struct amdgpu_device *adev = encoder->dev->dev_private;
1259 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1260 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1261 u32 offset;
1262 struct drm_connector *connector;
1263 struct amdgpu_connector *amdgpu_connector = NULL;
1264 struct cea_sad *sads;
1265 int i, sad_count;
1266
1267 static const u16 eld_reg_to_type[][2] = {
1268 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0, HDMI_AUDIO_CODING_TYPE_PCM },
1269 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR1, HDMI_AUDIO_CODING_TYPE_AC3 },
1270 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR2, HDMI_AUDIO_CODING_TYPE_MPEG1 },
1271 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR3, HDMI_AUDIO_CODING_TYPE_MP3 },
1272 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR4, HDMI_AUDIO_CODING_TYPE_MPEG2 },
1273 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR5, HDMI_AUDIO_CODING_TYPE_AAC_LC },
1274 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR6, HDMI_AUDIO_CODING_TYPE_DTS },
1275 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR7, HDMI_AUDIO_CODING_TYPE_ATRAC },
1276 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR9, HDMI_AUDIO_CODING_TYPE_EAC3 },
1277 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR10, HDMI_AUDIO_CODING_TYPE_DTS_HD },
1278 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR11, HDMI_AUDIO_CODING_TYPE_MLP },
1279 { ixAZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR13, HDMI_AUDIO_CODING_TYPE_WMA_PRO },
1280 };
1281
1282 if (!dig || !dig->afmt || !dig->afmt->pin)
1283 return;
1284
1285 offset = dig->afmt->pin->offset;
1286
1287 list_for_each_entry(connector, &encoder->dev->mode_config.connector_list, head) {
1288 if (connector->encoder == encoder) {
1289 amdgpu_connector = to_amdgpu_connector(connector);
1290 break;
1291 }
1292 }
1293
1294 if (!amdgpu_connector) {
1295 DRM_ERROR("Couldn't find encoder's connector\n");
1296 return;
1297 }
1298
1299 sad_count = drm_edid_to_sad(amdgpu_connector_edid(connector), &sads);
1300 if (sad_count <= 0) {
1301 DRM_ERROR("Couldn't read SADs: %d\n", sad_count);
1302 return;
1303 }
1304 BUG_ON(!sads);
1305
1306 for (i = 0; i < ARRAY_SIZE(eld_reg_to_type); i++) {
1307 u32 value = 0;
1308 u8 stereo_freqs = 0;
1309 int max_channels = -1;
1310 int j;
1311
1312 for (j = 0; j < sad_count; j++) {
1313 struct cea_sad *sad = &sads[j];
1314
1315 if (sad->format == eld_reg_to_type[i][1]) {
1316 if (sad->channels > max_channels) {
1317 value = (sad->channels <<
1318 AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0__MAX_CHANNELS__SHIFT) |
1319 (sad->byte2 <<
1320 AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0__DESCRIPTOR_BYTE_2__SHIFT) |
1321 (sad->freq <<
1322 AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0__SUPPORTED_FREQUENCIES__SHIFT);
1323 max_channels = sad->channels;
1324 }
1325
1326 if (sad->format == HDMI_AUDIO_CODING_TYPE_PCM)
1327 stereo_freqs |= sad->freq;
1328 else
1329 break;
1330 }
1331 }
1332
1333 value |= (stereo_freqs <<
1334 AZALIA_F0_CODEC_PIN_CONTROL_AUDIO_DESCRIPTOR0__SUPPORTED_FREQUENCIES_STEREO__SHIFT);
1335
1336 WREG32_AUDIO_ENDPT(offset, eld_reg_to_type[i][0], value);
1337 }
1338
1339 kfree(sads);
1340}
1341
1342static void dce_v8_0_audio_enable(struct amdgpu_device *adev,
1343 struct amdgpu_audio_pin *pin,
1344 bool enable)
1345{
1346 if (!pin)
1347 return;
1348
1349 WREG32_AUDIO_ENDPT(pin->offset, ixAZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL,
1350 enable ? AZALIA_F0_CODEC_PIN_CONTROL_HOT_PLUG_CONTROL__AUDIO_ENABLED_MASK : 0);
1351}
1352
1353static const u32 pin_offsets[7] =
1354{
1355 (0x1780 - 0x1780),
1356 (0x1786 - 0x1780),
1357 (0x178c - 0x1780),
1358 (0x1792 - 0x1780),
1359 (0x1798 - 0x1780),
1360 (0x179d - 0x1780),
1361 (0x17a4 - 0x1780),
1362};
1363
1364static int dce_v8_0_audio_init(struct amdgpu_device *adev)
1365{
1366 int i;
1367
1368 if (!amdgpu_audio)
1369 return 0;
1370
1371 adev->mode_info.audio.enabled = true;
1372
1373 if (adev->asic_type == CHIP_KAVERI)
1374 adev->mode_info.audio.num_pins = 7;
1375 else if ((adev->asic_type == CHIP_KABINI) ||
1376 (adev->asic_type == CHIP_MULLINS))
1377 adev->mode_info.audio.num_pins = 3;
1378 else if ((adev->asic_type == CHIP_BONAIRE) ||
1379 (adev->asic_type == CHIP_HAWAII))
1380 adev->mode_info.audio.num_pins = 7;
1381 else
1382 adev->mode_info.audio.num_pins = 3;
1383
1384 for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
1385 adev->mode_info.audio.pin[i].channels = -1;
1386 adev->mode_info.audio.pin[i].rate = -1;
1387 adev->mode_info.audio.pin[i].bits_per_sample = -1;
1388 adev->mode_info.audio.pin[i].status_bits = 0;
1389 adev->mode_info.audio.pin[i].category_code = 0;
1390 adev->mode_info.audio.pin[i].connected = false;
1391 adev->mode_info.audio.pin[i].offset = pin_offsets[i];
1392 adev->mode_info.audio.pin[i].id = i;
1393
1394
1395 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
1396 }
1397
1398 return 0;
1399}
1400
1401static void dce_v8_0_audio_fini(struct amdgpu_device *adev)
1402{
1403 int i;
1404
1405 if (!amdgpu_audio)
1406 return;
1407
1408 if (!adev->mode_info.audio.enabled)
1409 return;
1410
1411 for (i = 0; i < adev->mode_info.audio.num_pins; i++)
1412 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
1413
1414 adev->mode_info.audio.enabled = false;
1415}
1416
1417
1418
1419
1420static void dce_v8_0_afmt_update_ACR(struct drm_encoder *encoder, uint32_t clock)
1421{
1422 struct drm_device *dev = encoder->dev;
1423 struct amdgpu_device *adev = dev->dev_private;
1424 struct amdgpu_afmt_acr acr = amdgpu_afmt_acr(clock);
1425 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1426 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1427 uint32_t offset = dig->afmt->offset;
1428
1429 WREG32(mmHDMI_ACR_32_0 + offset, (acr.cts_32khz << HDMI_ACR_32_0__HDMI_ACR_CTS_32__SHIFT));
1430 WREG32(mmHDMI_ACR_32_1 + offset, acr.n_32khz);
1431
1432 WREG32(mmHDMI_ACR_44_0 + offset, (acr.cts_44_1khz << HDMI_ACR_44_0__HDMI_ACR_CTS_44__SHIFT));
1433 WREG32(mmHDMI_ACR_44_1 + offset, acr.n_44_1khz);
1434
1435 WREG32(mmHDMI_ACR_48_0 + offset, (acr.cts_48khz << HDMI_ACR_48_0__HDMI_ACR_CTS_48__SHIFT));
1436 WREG32(mmHDMI_ACR_48_1 + offset, acr.n_48khz);
1437}
1438
1439
1440
1441
1442static void dce_v8_0_afmt_update_avi_infoframe(struct drm_encoder *encoder,
1443 void *buffer, size_t size)
1444{
1445 struct drm_device *dev = encoder->dev;
1446 struct amdgpu_device *adev = dev->dev_private;
1447 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1448 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1449 uint32_t offset = dig->afmt->offset;
1450 uint8_t *frame = buffer + 3;
1451 uint8_t *header = buffer;
1452
1453 WREG32(mmAFMT_AVI_INFO0 + offset,
1454 frame[0x0] | (frame[0x1] << 8) | (frame[0x2] << 16) | (frame[0x3] << 24));
1455 WREG32(mmAFMT_AVI_INFO1 + offset,
1456 frame[0x4] | (frame[0x5] << 8) | (frame[0x6] << 16) | (frame[0x7] << 24));
1457 WREG32(mmAFMT_AVI_INFO2 + offset,
1458 frame[0x8] | (frame[0x9] << 8) | (frame[0xA] << 16) | (frame[0xB] << 24));
1459 WREG32(mmAFMT_AVI_INFO3 + offset,
1460 frame[0xC] | (frame[0xD] << 8) | (header[1] << 24));
1461}
1462
1463static void dce_v8_0_audio_set_dto(struct drm_encoder *encoder, u32 clock)
1464{
1465 struct drm_device *dev = encoder->dev;
1466 struct amdgpu_device *adev = dev->dev_private;
1467 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1468 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1469 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1470 u32 dto_phase = 24 * 1000;
1471 u32 dto_modulo = clock;
1472
1473 if (!dig || !dig->afmt)
1474 return;
1475
1476
1477
1478
1479
1480
1481 WREG32(mmDCCG_AUDIO_DTO_SOURCE, (amdgpu_crtc->crtc_id << DCCG_AUDIO_DTO_SOURCE__DCCG_AUDIO_DTO0_SOURCE_SEL__SHIFT));
1482 WREG32(mmDCCG_AUDIO_DTO0_PHASE, dto_phase);
1483 WREG32(mmDCCG_AUDIO_DTO0_MODULE, dto_modulo);
1484}
1485
1486
1487
1488
1489static void dce_v8_0_afmt_setmode(struct drm_encoder *encoder,
1490 struct drm_display_mode *mode)
1491{
1492 struct drm_device *dev = encoder->dev;
1493 struct amdgpu_device *adev = dev->dev_private;
1494 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1495 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1496 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1497 u8 buffer[HDMI_INFOFRAME_HEADER_SIZE + HDMI_AVI_INFOFRAME_SIZE];
1498 struct hdmi_avi_infoframe frame;
1499 uint32_t offset, val;
1500 ssize_t err;
1501 int bpc = 8;
1502
1503 if (!dig || !dig->afmt)
1504 return;
1505
1506
1507 if (!dig->afmt->enabled)
1508 return;
1509
1510 offset = dig->afmt->offset;
1511
1512
1513 if (encoder->crtc) {
1514 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1515 bpc = amdgpu_crtc->bpc;
1516 }
1517
1518
1519 dig->afmt->pin = dce_v8_0_audio_get_pin(adev);
1520 dce_v8_0_audio_enable(adev, dig->afmt->pin, false);
1521
1522 dce_v8_0_audio_set_dto(encoder, mode->clock);
1523
1524 WREG32(mmHDMI_VBI_PACKET_CONTROL + offset,
1525 HDMI_VBI_PACKET_CONTROL__HDMI_NULL_SEND_MASK);
1526
1527 WREG32(mmAFMT_AUDIO_CRC_CONTROL + offset, 0x1000);
1528
1529 val = RREG32(mmHDMI_CONTROL + offset);
1530 val &= ~HDMI_CONTROL__HDMI_DEEP_COLOR_ENABLE_MASK;
1531 val &= ~HDMI_CONTROL__HDMI_DEEP_COLOR_DEPTH_MASK;
1532
1533 switch (bpc) {
1534 case 0:
1535 case 6:
1536 case 8:
1537 case 16:
1538 default:
1539 DRM_DEBUG("%s: Disabling hdmi deep color for %d bpc.\n",
1540 connector->name, bpc);
1541 break;
1542 case 10:
1543 val |= HDMI_CONTROL__HDMI_DEEP_COLOR_ENABLE_MASK;
1544 val |= 1 << HDMI_CONTROL__HDMI_DEEP_COLOR_DEPTH__SHIFT;
1545 DRM_DEBUG("%s: Enabling hdmi deep color 30 for 10 bpc.\n",
1546 connector->name);
1547 break;
1548 case 12:
1549 val |= HDMI_CONTROL__HDMI_DEEP_COLOR_ENABLE_MASK;
1550 val |= 2 << HDMI_CONTROL__HDMI_DEEP_COLOR_DEPTH__SHIFT;
1551 DRM_DEBUG("%s: Enabling hdmi deep color 36 for 12 bpc.\n",
1552 connector->name);
1553 break;
1554 }
1555
1556 WREG32(mmHDMI_CONTROL + offset, val);
1557
1558 WREG32(mmHDMI_VBI_PACKET_CONTROL + offset,
1559 HDMI_VBI_PACKET_CONTROL__HDMI_NULL_SEND_MASK |
1560 HDMI_VBI_PACKET_CONTROL__HDMI_GC_SEND_MASK |
1561 HDMI_VBI_PACKET_CONTROL__HDMI_GC_CONT_MASK);
1562
1563 WREG32(mmHDMI_INFOFRAME_CONTROL0 + offset,
1564 HDMI_INFOFRAME_CONTROL0__HDMI_AUDIO_INFO_SEND_MASK |
1565 HDMI_INFOFRAME_CONTROL0__HDMI_AUDIO_INFO_CONT_MASK);
1566
1567 WREG32(mmAFMT_INFOFRAME_CONTROL0 + offset,
1568 AFMT_INFOFRAME_CONTROL0__AFMT_AUDIO_INFO_UPDATE_MASK);
1569
1570 WREG32(mmHDMI_INFOFRAME_CONTROL1 + offset,
1571 (2 << HDMI_INFOFRAME_CONTROL1__HDMI_AUDIO_INFO_LINE__SHIFT));
1572
1573 WREG32(mmHDMI_GC + offset, 0);
1574
1575 WREG32(mmHDMI_AUDIO_PACKET_CONTROL + offset,
1576 (1 << HDMI_AUDIO_PACKET_CONTROL__HDMI_AUDIO_DELAY_EN__SHIFT) |
1577 (3 << HDMI_AUDIO_PACKET_CONTROL__HDMI_AUDIO_PACKETS_PER_LINE__SHIFT));
1578
1579 WREG32(mmAFMT_AUDIO_PACKET_CONTROL + offset,
1580 AFMT_AUDIO_PACKET_CONTROL__AFMT_60958_CS_UPDATE_MASK);
1581
1582
1583
1584 if (bpc > 8)
1585 WREG32(mmHDMI_ACR_PACKET_CONTROL + offset,
1586 HDMI_ACR_PACKET_CONTROL__HDMI_ACR_AUTO_SEND_MASK);
1587 else
1588 WREG32(mmHDMI_ACR_PACKET_CONTROL + offset,
1589 HDMI_ACR_PACKET_CONTROL__HDMI_ACR_SOURCE_MASK |
1590 HDMI_ACR_PACKET_CONTROL__HDMI_ACR_AUTO_SEND_MASK);
1591
1592 dce_v8_0_afmt_update_ACR(encoder, mode->clock);
1593
1594 WREG32(mmAFMT_60958_0 + offset,
1595 (1 << AFMT_60958_0__AFMT_60958_CS_CHANNEL_NUMBER_L__SHIFT));
1596
1597 WREG32(mmAFMT_60958_1 + offset,
1598 (2 << AFMT_60958_1__AFMT_60958_CS_CHANNEL_NUMBER_R__SHIFT));
1599
1600 WREG32(mmAFMT_60958_2 + offset,
1601 (3 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_2__SHIFT) |
1602 (4 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_3__SHIFT) |
1603 (5 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_4__SHIFT) |
1604 (6 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_5__SHIFT) |
1605 (7 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_6__SHIFT) |
1606 (8 << AFMT_60958_2__AFMT_60958_CS_CHANNEL_NUMBER_7__SHIFT));
1607
1608 dce_v8_0_audio_write_speaker_allocation(encoder);
1609
1610
1611 WREG32(mmAFMT_AUDIO_PACKET_CONTROL2 + offset,
1612 (0xff << AFMT_AUDIO_PACKET_CONTROL2__AFMT_AUDIO_CHANNEL_ENABLE__SHIFT));
1613
1614 dce_v8_0_afmt_audio_select_pin(encoder);
1615 dce_v8_0_audio_write_sad_regs(encoder);
1616 dce_v8_0_audio_write_latency_fields(encoder, mode);
1617
1618 err = drm_hdmi_avi_infoframe_from_display_mode(&frame, mode, false);
1619 if (err < 0) {
1620 DRM_ERROR("failed to setup AVI infoframe: %zd\n", err);
1621 return;
1622 }
1623
1624 err = hdmi_avi_infoframe_pack(&frame, buffer, sizeof(buffer));
1625 if (err < 0) {
1626 DRM_ERROR("failed to pack AVI infoframe: %zd\n", err);
1627 return;
1628 }
1629
1630 dce_v8_0_afmt_update_avi_infoframe(encoder, buffer, sizeof(buffer));
1631
1632 WREG32_OR(mmHDMI_INFOFRAME_CONTROL0 + offset,
1633 HDMI_INFOFRAME_CONTROL0__HDMI_AVI_INFO_SEND_MASK |
1634 HDMI_INFOFRAME_CONTROL0__HDMI_AVI_INFO_CONT_MASK);
1635
1636 WREG32_P(mmHDMI_INFOFRAME_CONTROL1 + offset,
1637 (2 << HDMI_INFOFRAME_CONTROL1__HDMI_AVI_INFO_LINE__SHIFT),
1638 ~HDMI_INFOFRAME_CONTROL1__HDMI_AVI_INFO_LINE_MASK);
1639
1640 WREG32_OR(mmAFMT_AUDIO_PACKET_CONTROL + offset,
1641 AFMT_AUDIO_PACKET_CONTROL__AFMT_AUDIO_SAMPLE_SEND_MASK);
1642
1643 WREG32(mmAFMT_RAMP_CONTROL0 + offset, 0x00FFFFFF);
1644 WREG32(mmAFMT_RAMP_CONTROL1 + offset, 0x007FFFFF);
1645 WREG32(mmAFMT_RAMP_CONTROL2 + offset, 0x00000001);
1646 WREG32(mmAFMT_RAMP_CONTROL3 + offset, 0x00000001);
1647
1648
1649 dce_v8_0_audio_enable(adev, dig->afmt->pin, true);
1650}
1651
1652static void dce_v8_0_afmt_enable(struct drm_encoder *encoder, bool enable)
1653{
1654 struct drm_device *dev = encoder->dev;
1655 struct amdgpu_device *adev = dev->dev_private;
1656 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1657 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1658
1659 if (!dig || !dig->afmt)
1660 return;
1661
1662
1663 if (enable && dig->afmt->enabled)
1664 return;
1665 if (!enable && !dig->afmt->enabled)
1666 return;
1667
1668 if (!enable && dig->afmt->pin) {
1669 dce_v8_0_audio_enable(adev, dig->afmt->pin, false);
1670 dig->afmt->pin = NULL;
1671 }
1672
1673 dig->afmt->enabled = enable;
1674
1675 DRM_DEBUG("%sabling AFMT interface @ 0x%04X for encoder 0x%x\n",
1676 enable ? "En" : "Dis", dig->afmt->offset, amdgpu_encoder->encoder_id);
1677}
1678
1679static int dce_v8_0_afmt_init(struct amdgpu_device *adev)
1680{
1681 int i;
1682
1683 for (i = 0; i < adev->mode_info.num_dig; i++)
1684 adev->mode_info.afmt[i] = NULL;
1685
1686
1687 for (i = 0; i < adev->mode_info.num_dig; i++) {
1688 adev->mode_info.afmt[i] = kzalloc(sizeof(struct amdgpu_afmt), GFP_KERNEL);
1689 if (adev->mode_info.afmt[i]) {
1690 adev->mode_info.afmt[i]->offset = dig_offsets[i];
1691 adev->mode_info.afmt[i]->id = i;
1692 } else {
1693 int j;
1694 for (j = 0; j < i; j++) {
1695 kfree(adev->mode_info.afmt[j]);
1696 adev->mode_info.afmt[j] = NULL;
1697 }
1698 return -ENOMEM;
1699 }
1700 }
1701 return 0;
1702}
1703
1704static void dce_v8_0_afmt_fini(struct amdgpu_device *adev)
1705{
1706 int i;
1707
1708 for (i = 0; i < adev->mode_info.num_dig; i++) {
1709 kfree(adev->mode_info.afmt[i]);
1710 adev->mode_info.afmt[i] = NULL;
1711 }
1712}
1713
1714static const u32 vga_control_regs[6] =
1715{
1716 mmD1VGA_CONTROL,
1717 mmD2VGA_CONTROL,
1718 mmD3VGA_CONTROL,
1719 mmD4VGA_CONTROL,
1720 mmD5VGA_CONTROL,
1721 mmD6VGA_CONTROL,
1722};
1723
1724static void dce_v8_0_vga_enable(struct drm_crtc *crtc, bool enable)
1725{
1726 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
1727 struct drm_device *dev = crtc->dev;
1728 struct amdgpu_device *adev = dev->dev_private;
1729 u32 vga_control;
1730
1731 vga_control = RREG32(vga_control_regs[amdgpu_crtc->crtc_id]) & ~1;
1732 if (enable)
1733 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control | 1);
1734 else
1735 WREG32(vga_control_regs[amdgpu_crtc->crtc_id], vga_control);
1736}
1737
1738static void dce_v8_0_grph_enable(struct drm_crtc *crtc, bool enable)
1739{
1740 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
1741 struct drm_device *dev = crtc->dev;
1742 struct amdgpu_device *adev = dev->dev_private;
1743
1744 if (enable)
1745 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 1);
1746 else
1747 WREG32(mmGRPH_ENABLE + amdgpu_crtc->crtc_offset, 0);
1748}
1749
1750static int dce_v8_0_crtc_do_set_base(struct drm_crtc *crtc,
1751 struct drm_framebuffer *fb,
1752 int x, int y, int atomic)
1753{
1754 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
1755 struct drm_device *dev = crtc->dev;
1756 struct amdgpu_device *adev = dev->dev_private;
1757 struct drm_framebuffer *target_fb;
1758 struct drm_gem_object *obj;
1759 struct amdgpu_bo *abo;
1760 uint64_t fb_location, tiling_flags;
1761 uint32_t fb_format, fb_pitch_pixels;
1762 u32 fb_swap = (GRPH_ENDIAN_NONE << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1763 u32 pipe_config;
1764 u32 viewport_w, viewport_h;
1765 int r;
1766 bool bypass_lut = false;
1767 struct drm_format_name_buf format_name;
1768
1769
1770 if (!atomic && !crtc->primary->fb) {
1771 DRM_DEBUG_KMS("No FB bound\n");
1772 return 0;
1773 }
1774
1775 if (atomic)
1776 target_fb = fb;
1777 else
1778 target_fb = crtc->primary->fb;
1779
1780
1781
1782
1783 obj = target_fb->obj[0];
1784 abo = gem_to_amdgpu_bo(obj);
1785 r = amdgpu_bo_reserve(abo, false);
1786 if (unlikely(r != 0))
1787 return r;
1788
1789 if (!atomic) {
1790 r = amdgpu_bo_pin(abo, AMDGPU_GEM_DOMAIN_VRAM);
1791 if (unlikely(r != 0)) {
1792 amdgpu_bo_unreserve(abo);
1793 return -EINVAL;
1794 }
1795 }
1796 fb_location = amdgpu_bo_gpu_offset(abo);
1797
1798 amdgpu_bo_get_tiling_flags(abo, &tiling_flags);
1799 amdgpu_bo_unreserve(abo);
1800
1801 pipe_config = AMDGPU_TILING_GET(tiling_flags, PIPE_CONFIG);
1802
1803 switch (target_fb->format->format) {
1804 case DRM_FORMAT_C8:
1805 fb_format = ((GRPH_DEPTH_8BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1806 (GRPH_FORMAT_INDEXED << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1807 break;
1808 case DRM_FORMAT_XRGB4444:
1809 case DRM_FORMAT_ARGB4444:
1810 fb_format = ((GRPH_DEPTH_16BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1811 (GRPH_FORMAT_ARGB4444 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1812#ifdef __BIG_ENDIAN
1813 fb_swap = (GRPH_ENDIAN_8IN16 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1814#endif
1815 break;
1816 case DRM_FORMAT_XRGB1555:
1817 case DRM_FORMAT_ARGB1555:
1818 fb_format = ((GRPH_DEPTH_16BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1819 (GRPH_FORMAT_ARGB1555 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1820#ifdef __BIG_ENDIAN
1821 fb_swap = (GRPH_ENDIAN_8IN16 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1822#endif
1823 break;
1824 case DRM_FORMAT_BGRX5551:
1825 case DRM_FORMAT_BGRA5551:
1826 fb_format = ((GRPH_DEPTH_16BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1827 (GRPH_FORMAT_BGRA5551 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1828#ifdef __BIG_ENDIAN
1829 fb_swap = (GRPH_ENDIAN_8IN16 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1830#endif
1831 break;
1832 case DRM_FORMAT_RGB565:
1833 fb_format = ((GRPH_DEPTH_16BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1834 (GRPH_FORMAT_ARGB565 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1835#ifdef __BIG_ENDIAN
1836 fb_swap = (GRPH_ENDIAN_8IN16 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1837#endif
1838 break;
1839 case DRM_FORMAT_XRGB8888:
1840 case DRM_FORMAT_ARGB8888:
1841 fb_format = ((GRPH_DEPTH_32BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1842 (GRPH_FORMAT_ARGB8888 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1843#ifdef __BIG_ENDIAN
1844 fb_swap = (GRPH_ENDIAN_8IN32 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1845#endif
1846 break;
1847 case DRM_FORMAT_XRGB2101010:
1848 case DRM_FORMAT_ARGB2101010:
1849 fb_format = ((GRPH_DEPTH_32BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1850 (GRPH_FORMAT_ARGB2101010 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1851#ifdef __BIG_ENDIAN
1852 fb_swap = (GRPH_ENDIAN_8IN32 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1853#endif
1854
1855 bypass_lut = true;
1856 break;
1857 case DRM_FORMAT_BGRX1010102:
1858 case DRM_FORMAT_BGRA1010102:
1859 fb_format = ((GRPH_DEPTH_32BPP << GRPH_CONTROL__GRPH_DEPTH__SHIFT) |
1860 (GRPH_FORMAT_BGRA1010102 << GRPH_CONTROL__GRPH_FORMAT__SHIFT));
1861#ifdef __BIG_ENDIAN
1862 fb_swap = (GRPH_ENDIAN_8IN32 << GRPH_SWAP_CNTL__GRPH_ENDIAN_SWAP__SHIFT);
1863#endif
1864
1865 bypass_lut = true;
1866 break;
1867 default:
1868 DRM_ERROR("Unsupported screen format %s\n",
1869 drm_get_format_name(target_fb->format->format, &format_name));
1870 return -EINVAL;
1871 }
1872
1873 if (AMDGPU_TILING_GET(tiling_flags, ARRAY_MODE) == ARRAY_2D_TILED_THIN1) {
1874 unsigned bankw, bankh, mtaspect, tile_split, num_banks;
1875
1876 bankw = AMDGPU_TILING_GET(tiling_flags, BANK_WIDTH);
1877 bankh = AMDGPU_TILING_GET(tiling_flags, BANK_HEIGHT);
1878 mtaspect = AMDGPU_TILING_GET(tiling_flags, MACRO_TILE_ASPECT);
1879 tile_split = AMDGPU_TILING_GET(tiling_flags, TILE_SPLIT);
1880 num_banks = AMDGPU_TILING_GET(tiling_flags, NUM_BANKS);
1881
1882 fb_format |= (num_banks << GRPH_CONTROL__GRPH_NUM_BANKS__SHIFT);
1883 fb_format |= (GRPH_ARRAY_2D_TILED_THIN1 << GRPH_CONTROL__GRPH_ARRAY_MODE__SHIFT);
1884 fb_format |= (tile_split << GRPH_CONTROL__GRPH_TILE_SPLIT__SHIFT);
1885 fb_format |= (bankw << GRPH_CONTROL__GRPH_BANK_WIDTH__SHIFT);
1886 fb_format |= (bankh << GRPH_CONTROL__GRPH_BANK_HEIGHT__SHIFT);
1887 fb_format |= (mtaspect << GRPH_CONTROL__GRPH_MACRO_TILE_ASPECT__SHIFT);
1888 fb_format |= (DISPLAY_MICRO_TILING << GRPH_CONTROL__GRPH_MICRO_TILE_MODE__SHIFT);
1889 } else if (AMDGPU_TILING_GET(tiling_flags, ARRAY_MODE) == ARRAY_1D_TILED_THIN1) {
1890 fb_format |= (GRPH_ARRAY_1D_TILED_THIN1 << GRPH_CONTROL__GRPH_ARRAY_MODE__SHIFT);
1891 }
1892
1893 fb_format |= (pipe_config << GRPH_CONTROL__GRPH_PIPE_CONFIG__SHIFT);
1894
1895 dce_v8_0_vga_enable(crtc, false);
1896
1897
1898
1899
1900 WREG32(mmGRPH_FLIP_CONTROL + amdgpu_crtc->crtc_offset, 0);
1901
1902 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
1903 upper_32_bits(fb_location));
1904 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
1905 upper_32_bits(fb_location));
1906 WREG32(mmGRPH_PRIMARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
1907 (u32)fb_location & GRPH_PRIMARY_SURFACE_ADDRESS__GRPH_PRIMARY_SURFACE_ADDRESS_MASK);
1908 WREG32(mmGRPH_SECONDARY_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
1909 (u32) fb_location & GRPH_SECONDARY_SURFACE_ADDRESS__GRPH_SECONDARY_SURFACE_ADDRESS_MASK);
1910 WREG32(mmGRPH_CONTROL + amdgpu_crtc->crtc_offset, fb_format);
1911 WREG32(mmGRPH_SWAP_CNTL + amdgpu_crtc->crtc_offset, fb_swap);
1912
1913
1914
1915
1916
1917
1918 WREG32_P(mmGRPH_LUT_10BIT_BYPASS_CONTROL + amdgpu_crtc->crtc_offset,
1919 (bypass_lut ? LUT_10BIT_BYPASS_EN : 0),
1920 ~LUT_10BIT_BYPASS_EN);
1921
1922 if (bypass_lut)
1923 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1924
1925 WREG32(mmGRPH_SURFACE_OFFSET_X + amdgpu_crtc->crtc_offset, 0);
1926 WREG32(mmGRPH_SURFACE_OFFSET_Y + amdgpu_crtc->crtc_offset, 0);
1927 WREG32(mmGRPH_X_START + amdgpu_crtc->crtc_offset, 0);
1928 WREG32(mmGRPH_Y_START + amdgpu_crtc->crtc_offset, 0);
1929 WREG32(mmGRPH_X_END + amdgpu_crtc->crtc_offset, target_fb->width);
1930 WREG32(mmGRPH_Y_END + amdgpu_crtc->crtc_offset, target_fb->height);
1931
1932 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1933 WREG32(mmGRPH_PITCH + amdgpu_crtc->crtc_offset, fb_pitch_pixels);
1934
1935 dce_v8_0_grph_enable(crtc, true);
1936
1937 WREG32(mmLB_DESKTOP_HEIGHT + amdgpu_crtc->crtc_offset,
1938 target_fb->height);
1939
1940 x &= ~3;
1941 y &= ~1;
1942 WREG32(mmVIEWPORT_START + amdgpu_crtc->crtc_offset,
1943 (x << 16) | y);
1944 viewport_w = crtc->mode.hdisplay;
1945 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1946 WREG32(mmVIEWPORT_SIZE + amdgpu_crtc->crtc_offset,
1947 (viewport_w << 16) | viewport_h);
1948
	/* set pageflip to happen anywhere in the vblank interval */
1950 WREG32(mmMASTER_UPDATE_MODE + amdgpu_crtc->crtc_offset, 0);
1951
1952 if (!atomic && fb && fb != crtc->primary->fb) {
1953 abo = gem_to_amdgpu_bo(fb->obj[0]);
1954 r = amdgpu_bo_reserve(abo, true);
1955 if (unlikely(r != 0))
1956 return r;
1957 amdgpu_bo_unpin(abo);
1958 amdgpu_bo_unreserve(abo);
1959 }
1960
	/* bytes per pixel may have changed, so recompute the watermarks */
1962 dce_v8_0_bandwidth_update(adev);
1963
1964 return 0;
1965}
1966
1967static void dce_v8_0_set_interleave(struct drm_crtc *crtc,
1968 struct drm_display_mode *mode)
1969{
1970 struct drm_device *dev = crtc->dev;
1971 struct amdgpu_device *adev = dev->dev_private;
1972 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
1973
1974 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1975 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset,
1976 LB_DATA_FORMAT__INTERLEAVE_EN__SHIFT);
1977 else
1978 WREG32(mmLB_DATA_FORMAT + amdgpu_crtc->crtc_offset, 0);
1979}
1980
1981static void dce_v8_0_crtc_load_lut(struct drm_crtc *crtc)
1982{
1983 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
1984 struct drm_device *dev = crtc->dev;
1985 struct amdgpu_device *adev = dev->dev_private;
1986 u16 *r, *g, *b;
1987 int i;
1988
1989 DRM_DEBUG_KMS("%d\n", amdgpu_crtc->crtc_id);
1990
1991 WREG32(mmINPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
1992 ((INPUT_CSC_BYPASS << INPUT_CSC_CONTROL__INPUT_CSC_GRPH_MODE__SHIFT) |
1993 (INPUT_CSC_BYPASS << INPUT_CSC_CONTROL__INPUT_CSC_OVL_MODE__SHIFT)));
1994 WREG32(mmPRESCALE_GRPH_CONTROL + amdgpu_crtc->crtc_offset,
1995 PRESCALE_GRPH_CONTROL__GRPH_PRESCALE_BYPASS_MASK);
1996 WREG32(mmPRESCALE_OVL_CONTROL + amdgpu_crtc->crtc_offset,
1997 PRESCALE_OVL_CONTROL__OVL_PRESCALE_BYPASS_MASK);
1998 WREG32(mmINPUT_GAMMA_CONTROL + amdgpu_crtc->crtc_offset,
1999 ((INPUT_GAMMA_USE_LUT << INPUT_GAMMA_CONTROL__GRPH_INPUT_GAMMA_MODE__SHIFT) |
2000 (INPUT_GAMMA_USE_LUT << INPUT_GAMMA_CONTROL__OVL_INPUT_GAMMA_MODE__SHIFT)));
2001
2002 WREG32(mmDC_LUT_CONTROL + amdgpu_crtc->crtc_offset, 0);
2003
2004 WREG32(mmDC_LUT_BLACK_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0);
2005 WREG32(mmDC_LUT_BLACK_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0);
2006 WREG32(mmDC_LUT_BLACK_OFFSET_RED + amdgpu_crtc->crtc_offset, 0);
2007
2008 WREG32(mmDC_LUT_WHITE_OFFSET_BLUE + amdgpu_crtc->crtc_offset, 0xffff);
2009 WREG32(mmDC_LUT_WHITE_OFFSET_GREEN + amdgpu_crtc->crtc_offset, 0xffff);
2010 WREG32(mmDC_LUT_WHITE_OFFSET_RED + amdgpu_crtc->crtc_offset, 0xffff);
2011
2012 WREG32(mmDC_LUT_RW_MODE + amdgpu_crtc->crtc_offset, 0);
2013 WREG32(mmDC_LUT_WRITE_EN_MASK + amdgpu_crtc->crtc_offset, 0x00000007);
2014
2015 WREG32(mmDC_LUT_RW_INDEX + amdgpu_crtc->crtc_offset, 0);
2016 r = crtc->gamma_store;
2017 g = r + crtc->gamma_size;
2018 b = g + crtc->gamma_size;
2019 for (i = 0; i < 256; i++) {
2020 WREG32(mmDC_LUT_30_COLOR + amdgpu_crtc->crtc_offset,
2021 ((*r++ & 0xffc0) << 14) |
2022 ((*g++ & 0xffc0) << 4) |
2023 (*b++ >> 6));
2024 }
2025
2026 WREG32(mmDEGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
2027 ((DEGAMMA_BYPASS << DEGAMMA_CONTROL__GRPH_DEGAMMA_MODE__SHIFT) |
2028 (DEGAMMA_BYPASS << DEGAMMA_CONTROL__OVL_DEGAMMA_MODE__SHIFT) |
2029 (DEGAMMA_BYPASS << DEGAMMA_CONTROL__CURSOR_DEGAMMA_MODE__SHIFT)));
2030 WREG32(mmGAMUT_REMAP_CONTROL + amdgpu_crtc->crtc_offset,
2031 ((GAMUT_REMAP_BYPASS << GAMUT_REMAP_CONTROL__GRPH_GAMUT_REMAP_MODE__SHIFT) |
2032 (GAMUT_REMAP_BYPASS << GAMUT_REMAP_CONTROL__OVL_GAMUT_REMAP_MODE__SHIFT)));
2033 WREG32(mmREGAMMA_CONTROL + amdgpu_crtc->crtc_offset,
2034 ((REGAMMA_BYPASS << REGAMMA_CONTROL__GRPH_REGAMMA_MODE__SHIFT) |
2035 (REGAMMA_BYPASS << REGAMMA_CONTROL__OVL_REGAMMA_MODE__SHIFT)));
2036 WREG32(mmOUTPUT_CSC_CONTROL + amdgpu_crtc->crtc_offset,
2037 ((OUTPUT_CSC_BYPASS << OUTPUT_CSC_CONTROL__OUTPUT_CSC_GRPH_MODE__SHIFT) |
2038 (OUTPUT_CSC_BYPASS << OUTPUT_CSC_CONTROL__OUTPUT_CSC_OVL_MODE__SHIFT)));
2039
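	/* XXX hardcoded register offset (0x1a50 + crtc offset); always cleared
	 * here along with the rest of the color pipeline bypass setup
	 */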
2040 WREG32(0x1a50 + amdgpu_crtc->crtc_offset, 0);

	/* enable alpha blending for the hardware cursor */
2044 WREG32(mmALPHA_CONTROL + amdgpu_crtc->crtc_offset,
2045 ALPHA_CONTROL__CURSOR_ALPHA_BLND_ENA_MASK);
2046}
2047
2048static int dce_v8_0_pick_dig_encoder(struct drm_encoder *encoder)
2049{
2050 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
2051 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
2052
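	/* UNIPHY0/1/2 link A/B map to DIG encoders 0-5, UNIPHY3 to DIG 6 */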
2053 switch (amdgpu_encoder->encoder_id) {
2054 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
2055 if (dig->linkb)
2056 return 1;
2057 else
2058 return 0;
2059 break;
2060 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
2061 if (dig->linkb)
2062 return 3;
2063 else
2064 return 2;
2065 break;
2066 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
2067 if (dig->linkb)
2068 return 5;
2069 else
2070 return 4;
2071 break;
2072 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
2073 return 6;
2074 break;
2075 default:
2076 DRM_ERROR("invalid encoder_id: 0x%x\n", amdgpu_encoder->encoder_id);
2077 return 0;
2078 }
2079}
2080
/**
 * dce_v8_0_pick_pll - Allocate a PPLL for use by the crtc.
 *
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) to be used by the crtc. For DP monitors a
 * single PPLL can be shared by all DP crtcs/encoders (or skipped entirely
 * when an external DP clock is available); for non-DP monitors a PPLL is
 * only shared between crtcs driven with the same clock.
 *
 * DCE 8.x PLL availability:
 * KABINI/MULLINS - PPLL1 and PPLL2
 * BONAIRE/HAWAII/KAVERI - PPLL0, PPLL1 and PPLL2
 */
2103static u32 dce_v8_0_pick_pll(struct drm_crtc *crtc)
2104{
2105 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2106 struct drm_device *dev = crtc->dev;
2107 struct amdgpu_device *adev = dev->dev_private;
2108 u32 pll_in_use;
2109 int pll;
2110
2111 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder))) {
		if (adev->clock.dp_extclk)
			/* skip PPLL programming if using an external clock */
			return ATOM_PPLL_INVALID;
		else {
			/* use the same PPLL for all DP monitors */
2117 pll = amdgpu_pll_get_shared_dp_ppll(crtc);
2118 if (pll != ATOM_PPLL_INVALID)
2119 return pll;
2120 }
	} else {
		/* use the same PPLL for all monitors with the same clock */
2123 pll = amdgpu_pll_get_shared_nondp_ppll(crtc);
2124 if (pll != ATOM_PPLL_INVALID)
2125 return pll;
2126 }

	/* otherwise, pick one of the free plls */
	if ((adev->asic_type == CHIP_KABINI) ||
	    (adev->asic_type == CHIP_MULLINS)) {
		/* KABINI/MULLINS only have PPLL1 and PPLL2 */
2131 pll_in_use = amdgpu_pll_get_use_mask(crtc);
2132 if (!(pll_in_use & (1 << ATOM_PPLL2)))
2133 return ATOM_PPLL2;
2134 if (!(pll_in_use & (1 << ATOM_PPLL1)))
2135 return ATOM_PPLL1;
2136 DRM_ERROR("unable to allocate a PPLL\n");
2137 return ATOM_PPLL_INVALID;
	} else {
		/* BONAIRE/HAWAII/KAVERI have PPLL0, PPLL1 and PPLL2 */
2140 pll_in_use = amdgpu_pll_get_use_mask(crtc);
2141 if (!(pll_in_use & (1 << ATOM_PPLL2)))
2142 return ATOM_PPLL2;
2143 if (!(pll_in_use & (1 << ATOM_PPLL1)))
2144 return ATOM_PPLL1;
2145 if (!(pll_in_use & (1 << ATOM_PPLL0)))
2146 return ATOM_PPLL0;
2147 DRM_ERROR("unable to allocate a PPLL\n");
2148 return ATOM_PPLL_INVALID;
2149 }
2150 return ATOM_PPLL_INVALID;
2151}
2152
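/* lock (or unlock) cursor register update latching while the cursor is reprogrammed */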
2153static void dce_v8_0_lock_cursor(struct drm_crtc *crtc, bool lock)
2154{
2155 struct amdgpu_device *adev = crtc->dev->dev_private;
2156 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2157 uint32_t cur_lock;
2158
2159 cur_lock = RREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset);
2160 if (lock)
2161 cur_lock |= CUR_UPDATE__CURSOR_UPDATE_LOCK_MASK;
2162 else
2163 cur_lock &= ~CUR_UPDATE__CURSOR_UPDATE_LOCK_MASK;
2164 WREG32(mmCUR_UPDATE + amdgpu_crtc->crtc_offset, cur_lock);
2165}
2166
2167static void dce_v8_0_hide_cursor(struct drm_crtc *crtc)
2168{
2169 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2170 struct amdgpu_device *adev = crtc->dev->dev_private;
2171
2172 WREG32_IDX(mmCUR_CONTROL + amdgpu_crtc->crtc_offset,
2173 (CURSOR_24_8_PRE_MULT << CUR_CONTROL__CURSOR_MODE__SHIFT) |
2174 (CURSOR_URGENT_1_2 << CUR_CONTROL__CURSOR_URGENT_CONTROL__SHIFT));
2175}
2176
2177static void dce_v8_0_show_cursor(struct drm_crtc *crtc)
2178{
2179 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2180 struct amdgpu_device *adev = crtc->dev->dev_private;
2181
2182 WREG32(mmCUR_SURFACE_ADDRESS_HIGH + amdgpu_crtc->crtc_offset,
2183 upper_32_bits(amdgpu_crtc->cursor_addr));
2184 WREG32(mmCUR_SURFACE_ADDRESS + amdgpu_crtc->crtc_offset,
2185 lower_32_bits(amdgpu_crtc->cursor_addr));
2186
2187 WREG32_IDX(mmCUR_CONTROL + amdgpu_crtc->crtc_offset,
2188 CUR_CONTROL__CURSOR_EN_MASK |
2189 (CURSOR_24_8_PRE_MULT << CUR_CONTROL__CURSOR_MODE__SHIFT) |
2190 (CURSOR_URGENT_1_2 << CUR_CONTROL__CURSOR_URGENT_CONTROL__SHIFT));
2191}
2192
2193static int dce_v8_0_cursor_move_locked(struct drm_crtc *crtc,
2194 int x, int y)
2195{
2196 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2197 struct amdgpu_device *adev = crtc->dev->dev_private;
2198 int xorigin = 0, yorigin = 0;
2199
2200 amdgpu_crtc->cursor_x = x;
2201 amdgpu_crtc->cursor_y = y;
2202
	/* cursor coordinates are relative to the whole surface, so add the crtc offset */
2204 x += crtc->x;
2205 y += crtc->y;
2206 DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y);
2207
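	/* if the cursor hangs off the top/left edge, clamp the position to 0
	 * and use the hot spot registers to offset into the cursor image
	 */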
2208 if (x < 0) {
2209 xorigin = min(-x, amdgpu_crtc->max_cursor_width - 1);
2210 x = 0;
2211 }
2212 if (y < 0) {
2213 yorigin = min(-y, amdgpu_crtc->max_cursor_height - 1);
2214 y = 0;
2215 }
2216
2217 WREG32(mmCUR_POSITION + amdgpu_crtc->crtc_offset, (x << 16) | y);
2218 WREG32(mmCUR_HOT_SPOT + amdgpu_crtc->crtc_offset, (xorigin << 16) | yorigin);
2219 WREG32(mmCUR_SIZE + amdgpu_crtc->crtc_offset,
2220 ((amdgpu_crtc->cursor_width - 1) << 16) | (amdgpu_crtc->cursor_height - 1));
2221
2222 return 0;
2223}
2224
2225static int dce_v8_0_crtc_cursor_move(struct drm_crtc *crtc,
2226 int x, int y)
2227{
2228 int ret;
2229
2230 dce_v8_0_lock_cursor(crtc, true);
2231 ret = dce_v8_0_cursor_move_locked(crtc, x, y);
2232 dce_v8_0_lock_cursor(crtc, false);
2233
2234 return ret;
2235}
2236
2237static int dce_v8_0_crtc_cursor_set2(struct drm_crtc *crtc,
2238 struct drm_file *file_priv,
2239 uint32_t handle,
2240 uint32_t width,
2241 uint32_t height,
2242 int32_t hot_x,
2243 int32_t hot_y)
2244{
2245 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2246 struct drm_gem_object *obj;
2247 struct amdgpu_bo *aobj;
2248 int ret;
2249
2250 if (!handle) {
		/* turn off the cursor */
2252 dce_v8_0_hide_cursor(crtc);
2253 obj = NULL;
2254 goto unpin;
2255 }
2256
2257 if ((width > amdgpu_crtc->max_cursor_width) ||
2258 (height > amdgpu_crtc->max_cursor_height)) {
2259 DRM_ERROR("bad cursor width or height %d x %d\n", width, height);
2260 return -EINVAL;
2261 }
2262
2263 obj = drm_gem_object_lookup(file_priv, handle);
2264 if (!obj) {
2265 DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, amdgpu_crtc->crtc_id);
2266 return -ENOENT;
2267 }
2268
2269 aobj = gem_to_amdgpu_bo(obj);
2270 ret = amdgpu_bo_reserve(aobj, false);
2271 if (ret != 0) {
2272 drm_gem_object_put_unlocked(obj);
2273 return ret;
2274 }
2275
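	/* pin the cursor BO in VRAM so the display engine can scan it out */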
2276 ret = amdgpu_bo_pin(aobj, AMDGPU_GEM_DOMAIN_VRAM);
2277 amdgpu_bo_unreserve(aobj);
2278 if (ret) {
2279 DRM_ERROR("Failed to pin new cursor BO (%d)\n", ret);
2280 drm_gem_object_put_unlocked(obj);
2281 return ret;
2282 }
2283 amdgpu_crtc->cursor_addr = amdgpu_bo_gpu_offset(aobj);
2284
2285 dce_v8_0_lock_cursor(crtc, true);
2286
2287 if (width != amdgpu_crtc->cursor_width ||
2288 height != amdgpu_crtc->cursor_height ||
2289 hot_x != amdgpu_crtc->cursor_hot_x ||
2290 hot_y != amdgpu_crtc->cursor_hot_y) {
2291 int x, y;
2292
2293 x = amdgpu_crtc->cursor_x + amdgpu_crtc->cursor_hot_x - hot_x;
2294 y = amdgpu_crtc->cursor_y + amdgpu_crtc->cursor_hot_y - hot_y;
2295
2296 dce_v8_0_cursor_move_locked(crtc, x, y);
2297
2298 amdgpu_crtc->cursor_width = width;
2299 amdgpu_crtc->cursor_height = height;
2300 amdgpu_crtc->cursor_hot_x = hot_x;
2301 amdgpu_crtc->cursor_hot_y = hot_y;
2302 }
2303
2304 dce_v8_0_show_cursor(crtc);
2305 dce_v8_0_lock_cursor(crtc, false);
2306
2307unpin:
2308 if (amdgpu_crtc->cursor_bo) {
2309 struct amdgpu_bo *aobj = gem_to_amdgpu_bo(amdgpu_crtc->cursor_bo);
2310 ret = amdgpu_bo_reserve(aobj, true);
2311 if (likely(ret == 0)) {
2312 amdgpu_bo_unpin(aobj);
2313 amdgpu_bo_unreserve(aobj);
2314 }
2315 drm_gem_object_put_unlocked(amdgpu_crtc->cursor_bo);
2316 }
2317
2318 amdgpu_crtc->cursor_bo = obj;
2319 return 0;
2320}
2321
2322static void dce_v8_0_cursor_reset(struct drm_crtc *crtc)
2323{
2324 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2325
2326 if (amdgpu_crtc->cursor_bo) {
2327 dce_v8_0_lock_cursor(crtc, true);
2328
2329 dce_v8_0_cursor_move_locked(crtc, amdgpu_crtc->cursor_x,
2330 amdgpu_crtc->cursor_y);
2331
2332 dce_v8_0_show_cursor(crtc);
2333
2334 dce_v8_0_lock_cursor(crtc, false);
2335 }
2336}
2337
2338static int dce_v8_0_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
2339 u16 *blue, uint32_t size,
2340 struct drm_modeset_acquire_ctx *ctx)
2341{
2342 dce_v8_0_crtc_load_lut(crtc);
2343
2344 return 0;
2345}
2346
2347static void dce_v8_0_crtc_destroy(struct drm_crtc *crtc)
2348{
2349 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2350
2351 drm_crtc_cleanup(crtc);
2352 kfree(amdgpu_crtc);
2353}
2354
2355static const struct drm_crtc_funcs dce_v8_0_crtc_funcs = {
2356 .cursor_set2 = dce_v8_0_crtc_cursor_set2,
2357 .cursor_move = dce_v8_0_crtc_cursor_move,
2358 .gamma_set = dce_v8_0_crtc_gamma_set,
2359 .set_config = amdgpu_display_crtc_set_config,
2360 .destroy = dce_v8_0_crtc_destroy,
2361 .page_flip_target = amdgpu_display_crtc_page_flip_target,
2362};
2363
2364static void dce_v8_0_crtc_dpms(struct drm_crtc *crtc, int mode)
2365{
2366 struct drm_device *dev = crtc->dev;
2367 struct amdgpu_device *adev = dev->dev_private;
2368 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2369 unsigned type;
2370
2371 switch (mode) {
2372 case DRM_MODE_DPMS_ON:
2373 amdgpu_crtc->enabled = true;
2374 amdgpu_atombios_crtc_enable(crtc, ATOM_ENABLE);
2375 dce_v8_0_vga_enable(crtc, true);
2376 amdgpu_atombios_crtc_blank(crtc, ATOM_DISABLE);
2377 dce_v8_0_vga_enable(crtc, false);
		/* make sure the VBLANK and PFLIP interrupts are still enabled */
2379 type = amdgpu_display_crtc_idx_to_irq_type(adev,
2380 amdgpu_crtc->crtc_id);
2381 amdgpu_irq_update(adev, &adev->crtc_irq, type);
2382 amdgpu_irq_update(adev, &adev->pageflip_irq, type);
2383 drm_crtc_vblank_on(crtc);
2384 dce_v8_0_crtc_load_lut(crtc);
2385 break;
2386 case DRM_MODE_DPMS_STANDBY:
2387 case DRM_MODE_DPMS_SUSPEND:
2388 case DRM_MODE_DPMS_OFF:
2389 drm_crtc_vblank_off(crtc);
2390 if (amdgpu_crtc->enabled) {
2391 dce_v8_0_vga_enable(crtc, true);
2392 amdgpu_atombios_crtc_blank(crtc, ATOM_ENABLE);
2393 dce_v8_0_vga_enable(crtc, false);
2394 }
2395 amdgpu_atombios_crtc_enable(crtc, ATOM_DISABLE);
2396 amdgpu_crtc->enabled = false;
2397 break;
2398 }
2399
2400 amdgpu_pm_compute_clocks(adev);
2401}
2402
2403static void dce_v8_0_crtc_prepare(struct drm_crtc *crtc)
2404{
	/* disable crtc pair power gating before programming */
2406 amdgpu_atombios_crtc_powergate(crtc, ATOM_DISABLE);
2407 amdgpu_atombios_crtc_lock(crtc, ATOM_ENABLE);
2408 dce_v8_0_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
2409}
2410
2411static void dce_v8_0_crtc_commit(struct drm_crtc *crtc)
2412{
2413 dce_v8_0_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
2414 amdgpu_atombios_crtc_lock(crtc, ATOM_DISABLE);
2415}
2416
2417static void dce_v8_0_crtc_disable(struct drm_crtc *crtc)
2418{
2419 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2420 struct drm_device *dev = crtc->dev;
2421 struct amdgpu_device *adev = dev->dev_private;
2422 struct amdgpu_atom_ss ss;
2423 int i;
2424
2425 dce_v8_0_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
2426 if (crtc->primary->fb) {
2427 int r;
2428 struct amdgpu_bo *abo;
2429
2430 abo = gem_to_amdgpu_bo(crtc->primary->fb->obj[0]);
2431 r = amdgpu_bo_reserve(abo, true);
2432 if (unlikely(r))
2433 DRM_ERROR("failed to reserve abo before unpin\n");
2434 else {
2435 amdgpu_bo_unpin(abo);
2436 amdgpu_bo_unreserve(abo);
2437 }
2438 }
2439
2440 dce_v8_0_grph_enable(crtc, false);
2441
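	/* re-enable crtc pair power gating now that the crtc is shut off */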
2442 amdgpu_atombios_crtc_powergate(crtc, ATOM_ENABLE);
2443
2444 for (i = 0; i < adev->mode_info.num_crtc; i++) {
2445 if (adev->mode_info.crtcs[i] &&
2446 adev->mode_info.crtcs[i]->enabled &&
2447 i != amdgpu_crtc->crtc_id &&
2448 amdgpu_crtc->pll_id == adev->mode_info.crtcs[i]->pll_id) {
			/* another enabled crtc is still using this pll, so
			 * don't turn it off here
			 */
2452 goto done;
2453 }
2454 }
2455
2456 switch (amdgpu_crtc->pll_id) {
2457 case ATOM_PPLL1:
2458 case ATOM_PPLL2:
		/* disable the ppll */
2460 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id,
2461 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
2462 break;
2463 case ATOM_PPLL0:
		/* disable the ppll */
2465 if ((adev->asic_type == CHIP_KAVERI) ||
2466 (adev->asic_type == CHIP_BONAIRE) ||
2467 (adev->asic_type == CHIP_HAWAII))
2468 amdgpu_atombios_crtc_program_pll(crtc, amdgpu_crtc->crtc_id, amdgpu_crtc->pll_id,
2469 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
2470 break;
2471 default:
2472 break;
2473 }
2474done:
2475 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
2476 amdgpu_crtc->adjusted_clock = 0;
2477 amdgpu_crtc->encoder = NULL;
2478 amdgpu_crtc->connector = NULL;
2479}
2480
2481static int dce_v8_0_crtc_mode_set(struct drm_crtc *crtc,
2482 struct drm_display_mode *mode,
2483 struct drm_display_mode *adjusted_mode,
2484 int x, int y, struct drm_framebuffer *old_fb)
2485{
2486 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2487
2488 if (!amdgpu_crtc->adjusted_clock)
2489 return -EINVAL;
2490
2491 amdgpu_atombios_crtc_set_pll(crtc, adjusted_mode);
2492 amdgpu_atombios_crtc_set_dtd_timing(crtc, adjusted_mode);
2493 dce_v8_0_crtc_do_set_base(crtc, old_fb, x, y, 0);
2494 amdgpu_atombios_crtc_overscan_setup(crtc, mode, adjusted_mode);
2495 amdgpu_atombios_crtc_scaler_setup(crtc);
2496 dce_v8_0_cursor_reset(crtc);
2497
2498 amdgpu_crtc->hw_mode = *adjusted_mode;
2499
2500 return 0;
2501}
2502
2503static bool dce_v8_0_crtc_mode_fixup(struct drm_crtc *crtc,
2504 const struct drm_display_mode *mode,
2505 struct drm_display_mode *adjusted_mode)
2506{
2507 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);
2508 struct drm_device *dev = crtc->dev;
2509 struct drm_encoder *encoder;

	/* find the encoder and connector driving this crtc */
2512 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
2513 if (encoder->crtc == crtc) {
2514 amdgpu_crtc->encoder = encoder;
2515 amdgpu_crtc->connector = amdgpu_get_connector_for_encoder(encoder);
2516 break;
2517 }
2518 }
2519 if ((amdgpu_crtc->encoder == NULL) || (amdgpu_crtc->connector == NULL)) {
2520 amdgpu_crtc->encoder = NULL;
2521 amdgpu_crtc->connector = NULL;
2522 return false;
2523 }
2524 if (!amdgpu_display_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
2525 return false;
2526 if (amdgpu_atombios_crtc_prepare_pll(crtc, adjusted_mode))
2527 return false;
2528
2529 amdgpu_crtc->pll_id = dce_v8_0_pick_pll(crtc);
2530
2531 if ((amdgpu_crtc->pll_id == ATOM_PPLL_INVALID) &&
2532 !ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(amdgpu_crtc->encoder)))
2533 return false;
2534
2535 return true;
2536}
2537
2538static int dce_v8_0_crtc_set_base(struct drm_crtc *crtc, int x, int y,
2539 struct drm_framebuffer *old_fb)
2540{
2541 return dce_v8_0_crtc_do_set_base(crtc, old_fb, x, y, 0);
2542}
2543
2544static int dce_v8_0_crtc_set_base_atomic(struct drm_crtc *crtc,
2545 struct drm_framebuffer *fb,
2546 int x, int y, enum mode_set_atomic state)
2547{
2548 return dce_v8_0_crtc_do_set_base(crtc, fb, x, y, 1);
2549}
2550
2551static const struct drm_crtc_helper_funcs dce_v8_0_crtc_helper_funcs = {
2552 .dpms = dce_v8_0_crtc_dpms,
2553 .mode_fixup = dce_v8_0_crtc_mode_fixup,
2554 .mode_set = dce_v8_0_crtc_mode_set,
2555 .mode_set_base = dce_v8_0_crtc_set_base,
2556 .mode_set_base_atomic = dce_v8_0_crtc_set_base_atomic,
2557 .prepare = dce_v8_0_crtc_prepare,
2558 .commit = dce_v8_0_crtc_commit,
2559 .disable = dce_v8_0_crtc_disable,
2560};
2561
2562static int dce_v8_0_crtc_init(struct amdgpu_device *adev, int index)
2563{
2564 struct amdgpu_crtc *amdgpu_crtc;
2565
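	/* the crtc is allocated with trailing space for AMDGPUFB_CONN_LIMIT connector pointers */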
2566 amdgpu_crtc = kzalloc(sizeof(struct amdgpu_crtc) +
2567 (AMDGPUFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
2568 if (amdgpu_crtc == NULL)
2569 return -ENOMEM;
2570
2571 drm_crtc_init(adev->ddev, &amdgpu_crtc->base, &dce_v8_0_crtc_funcs);
2572
2573 drm_mode_crtc_set_gamma_size(&amdgpu_crtc->base, 256);
2574 amdgpu_crtc->crtc_id = index;
2575 adev->mode_info.crtcs[index] = amdgpu_crtc;
2576
2577 amdgpu_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
2578 amdgpu_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
2579 adev->ddev->mode_config.cursor_width = amdgpu_crtc->max_cursor_width;
2580 adev->ddev->mode_config.cursor_height = amdgpu_crtc->max_cursor_height;
2581
2582 amdgpu_crtc->crtc_offset = crtc_offsets[amdgpu_crtc->crtc_id];
2583
2584 amdgpu_crtc->pll_id = ATOM_PPLL_INVALID;
2585 amdgpu_crtc->adjusted_clock = 0;
2586 amdgpu_crtc->encoder = NULL;
2587 amdgpu_crtc->connector = NULL;
2588 drm_crtc_helper_add(&amdgpu_crtc->base, &dce_v8_0_crtc_helper_funcs);
2589
2590 return 0;
2591}
2592
2593static int dce_v8_0_early_init(void *handle)
2594{
2595 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2596
2597 adev->audio_endpt_rreg = &dce_v8_0_audio_endpt_rreg;
2598 adev->audio_endpt_wreg = &dce_v8_0_audio_endpt_wreg;
2599
2600 dce_v8_0_set_display_funcs(adev);
2601
2602 adev->mode_info.num_crtc = dce_v8_0_get_num_crtc(adev);
2603
2604 switch (adev->asic_type) {
2605 case CHIP_BONAIRE:
2606 case CHIP_HAWAII:
2607 adev->mode_info.num_hpd = 6;
2608 adev->mode_info.num_dig = 6;
2609 break;
2610 case CHIP_KAVERI:
2611 adev->mode_info.num_hpd = 6;
2612 adev->mode_info.num_dig = 7;
2613 break;
2614 case CHIP_KABINI:
2615 case CHIP_MULLINS:
2616 adev->mode_info.num_hpd = 6;
2617 adev->mode_info.num_dig = 6;
2618 break;
2619 default:
		/* FIXME: not supported yet */
2621 return -EINVAL;
2622 }
2623
2624 dce_v8_0_set_irq_funcs(adev);
2625
2626 return 0;
2627}
2628
2629static int dce_v8_0_sw_init(void *handle)
2630{
2631 int r, i;
2632 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2633
2634 for (i = 0; i < adev->mode_info.num_crtc; i++) {
2635 r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, i + 1, &adev->crtc_irq);
2636 if (r)
2637 return r;
2638 }
2639
2640 for (i = 8; i < 20; i += 2) {
2641 r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, i, &adev->pageflip_irq);
2642 if (r)
2643 return r;
2644 }
2645
	/* HPD hotplug */
2647 r = amdgpu_irq_add_id(adev, AMDGPU_IH_CLIENTID_LEGACY, 42, &adev->hpd_irq);
2648 if (r)
2649 return r;
2650
2651 adev->ddev->mode_config.funcs = &amdgpu_mode_funcs;
2652
2653 adev->ddev->mode_config.async_page_flip = true;
2654
2655 adev->ddev->mode_config.max_width = 16384;
2656 adev->ddev->mode_config.max_height = 16384;
2657
2658 adev->ddev->mode_config.preferred_depth = 24;
2659 adev->ddev->mode_config.prefer_shadow = 1;
2660
2661 adev->ddev->mode_config.fb_base = adev->gmc.aper_base;
2662
2663 r = amdgpu_display_modeset_create_props(adev);
2664 if (r)
2665 return r;
2666
	/* allocate crtcs */
2671 for (i = 0; i < adev->mode_info.num_crtc; i++) {
2672 r = dce_v8_0_crtc_init(adev, i);
2673 if (r)
2674 return r;
2675 }
2676
2677 if (amdgpu_atombios_get_connector_info_from_object_table(adev))
2678 amdgpu_display_print_display_setup(adev->ddev);
2679 else
2680 return -EINVAL;
2681
	/* set up the AFMT blocks */
2683 r = dce_v8_0_afmt_init(adev);
2684 if (r)
2685 return r;
2686
2687 r = dce_v8_0_audio_init(adev);
2688 if (r)
2689 return r;
2690
2691 drm_kms_helper_poll_init(adev->ddev);
2692
2693 adev->mode_info.mode_config_initialized = true;
2694 return 0;
2695}
2696
2697static int dce_v8_0_sw_fini(void *handle)
2698{
2699 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2700
2701 kfree(adev->mode_info.bios_hardcoded_edid);
2702
2703 drm_kms_helper_poll_fini(adev->ddev);
2704
2705 dce_v8_0_audio_fini(adev);
2706
2707 dce_v8_0_afmt_fini(adev);
2708
2709 drm_mode_config_cleanup(adev->ddev);
2710 adev->mode_info.mode_config_initialized = false;
2711
2712 return 0;
2713}
2714
2715static int dce_v8_0_hw_init(void *handle)
2716{
2717 int i;
2718 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2719
	/* disable vga render */
2721 dce_v8_0_set_vga_render_state(adev, false);
2722
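	/* init the dig PHYs and the display engine pll */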
2723 amdgpu_atombios_encoder_init_dig(adev);
2724 amdgpu_atombios_crtc_set_disp_eng_pll(adev, adev->clock.default_dispclk);

	/* initialize hpd */
2727 dce_v8_0_hpd_init(adev);
2728
2729 for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
2730 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
2731 }
2732
2733 dce_v8_0_pageflip_interrupt_init(adev);
2734
2735 return 0;
2736}
2737
2738static int dce_v8_0_hw_fini(void *handle)
2739{
2740 int i;
2741 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2742
2743 dce_v8_0_hpd_fini(adev);
2744
2745 for (i = 0; i < adev->mode_info.audio.num_pins; i++) {
2746 dce_v8_0_audio_enable(adev, &adev->mode_info.audio.pin[i], false);
2747 }
2748
2749 dce_v8_0_pageflip_interrupt_fini(adev);
2750
2751 return 0;
2752}
2753
2754static int dce_v8_0_suspend(void *handle)
2755{
2756 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2757
2758 adev->mode_info.bl_level =
2759 amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
2760
2761 return dce_v8_0_hw_fini(handle);
2762}
2763
2764static int dce_v8_0_resume(void *handle)
2765{
2766 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2767 int ret;
2768
2769 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev,
2770 adev->mode_info.bl_level);
2771
2772 ret = dce_v8_0_hw_init(handle);
2773
	/* restore the backlight level */
2775 if (adev->mode_info.bl_encoder) {
2776 u8 bl_level = amdgpu_display_backlight_get_level(adev,
2777 adev->mode_info.bl_encoder);
2778 amdgpu_display_backlight_set_level(adev, adev->mode_info.bl_encoder,
2779 bl_level);
2780 }
2781
2782 return ret;
2783}
2784
2785static bool dce_v8_0_is_idle(void *handle)
2786{
2787 return true;
2788}
2789
2790static int dce_v8_0_wait_for_idle(void *handle)
2791{
2792 return 0;
2793}
2794
2795static int dce_v8_0_soft_reset(void *handle)
2796{
2797 u32 srbm_soft_reset = 0, tmp;
2798 struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2799
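	/* request a display controller soft reset through SRBM if the display engine appears hung */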
2800 if (dce_v8_0_is_display_hung(adev))
2801 srbm_soft_reset |= SRBM_SOFT_RESET__SOFT_RESET_DC_MASK;
2802
2803 if (srbm_soft_reset) {
2804 tmp = RREG32(mmSRBM_SOFT_RESET);
2805 tmp |= srbm_soft_reset;
2806 dev_info(adev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
2807 WREG32(mmSRBM_SOFT_RESET, tmp);
2808 tmp = RREG32(mmSRBM_SOFT_RESET);
2809
2810 udelay(50);
2811
2812 tmp &= ~srbm_soft_reset;
2813 WREG32(mmSRBM_SOFT_RESET, tmp);
2814 tmp = RREG32(mmSRBM_SOFT_RESET);
2815
		/* wait a little for things to settle down */
2817 udelay(50);
2818 }
2819 return 0;
2820}
2821
2822static void dce_v8_0_set_crtc_vblank_interrupt_state(struct amdgpu_device *adev,
2823 int crtc,
2824 enum amdgpu_interrupt_state state)
2825{
2826 u32 reg_block, lb_interrupt_mask;
2827
2828 if (crtc >= adev->mode_info.num_crtc) {
2829 DRM_DEBUG("invalid crtc %d\n", crtc);
2830 return;
2831 }
2832
2833 switch (crtc) {
2834 case 0:
2835 reg_block = CRTC0_REGISTER_OFFSET;
2836 break;
2837 case 1:
2838 reg_block = CRTC1_REGISTER_OFFSET;
2839 break;
2840 case 2:
2841 reg_block = CRTC2_REGISTER_OFFSET;
2842 break;
2843 case 3:
2844 reg_block = CRTC3_REGISTER_OFFSET;
2845 break;
2846 case 4:
2847 reg_block = CRTC4_REGISTER_OFFSET;
2848 break;
2849 case 5:
2850 reg_block = CRTC5_REGISTER_OFFSET;
2851 break;
2852 default:
2853 DRM_DEBUG("invalid crtc %d\n", crtc);
2854 return;
2855 }
2856
2857 switch (state) {
2858 case AMDGPU_IRQ_STATE_DISABLE:
2859 lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + reg_block);
2860 lb_interrupt_mask &= ~LB_INTERRUPT_MASK__VBLANK_INTERRUPT_MASK_MASK;
2861 WREG32(mmLB_INTERRUPT_MASK + reg_block, lb_interrupt_mask);
2862 break;
2863 case AMDGPU_IRQ_STATE_ENABLE:
2864 lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + reg_block);
2865 lb_interrupt_mask |= LB_INTERRUPT_MASK__VBLANK_INTERRUPT_MASK_MASK;
2866 WREG32(mmLB_INTERRUPT_MASK + reg_block, lb_interrupt_mask);
2867 break;
2868 default:
2869 break;
2870 }
2871}
2872
2873static void dce_v8_0_set_crtc_vline_interrupt_state(struct amdgpu_device *adev,
2874 int crtc,
2875 enum amdgpu_interrupt_state state)
2876{
2877 u32 reg_block, lb_interrupt_mask;
2878
2879 if (crtc >= adev->mode_info.num_crtc) {
2880 DRM_DEBUG("invalid crtc %d\n", crtc);
2881 return;
2882 }
2883
2884 switch (crtc) {
2885 case 0:
2886 reg_block = CRTC0_REGISTER_OFFSET;
2887 break;
2888 case 1:
2889 reg_block = CRTC1_REGISTER_OFFSET;
2890 break;
2891 case 2:
2892 reg_block = CRTC2_REGISTER_OFFSET;
2893 break;
2894 case 3:
2895 reg_block = CRTC3_REGISTER_OFFSET;
2896 break;
2897 case 4:
2898 reg_block = CRTC4_REGISTER_OFFSET;
2899 break;
2900 case 5:
2901 reg_block = CRTC5_REGISTER_OFFSET;
2902 break;
2903 default:
2904 DRM_DEBUG("invalid crtc %d\n", crtc);
2905 return;
2906 }
2907
2908 switch (state) {
2909 case AMDGPU_IRQ_STATE_DISABLE:
2910 lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + reg_block);
2911 lb_interrupt_mask &= ~LB_INTERRUPT_MASK__VLINE_INTERRUPT_MASK_MASK;
2912 WREG32(mmLB_INTERRUPT_MASK + reg_block, lb_interrupt_mask);
2913 break;
2914 case AMDGPU_IRQ_STATE_ENABLE:
2915 lb_interrupt_mask = RREG32(mmLB_INTERRUPT_MASK + reg_block);
2916 lb_interrupt_mask |= LB_INTERRUPT_MASK__VLINE_INTERRUPT_MASK_MASK;
2917 WREG32(mmLB_INTERRUPT_MASK + reg_block, lb_interrupt_mask);
2918 break;
2919 default:
2920 break;
2921 }
2922}
2923
2924static int dce_v8_0_set_hpd_interrupt_state(struct amdgpu_device *adev,
2925 struct amdgpu_irq_src *src,
2926 unsigned type,
2927 enum amdgpu_interrupt_state state)
2928{
2929 u32 dc_hpd_int_cntl;
2930
2931 if (type >= adev->mode_info.num_hpd) {
		DRM_DEBUG("invalid hpd %d\n", type);
2933 return 0;
2934 }
2935
2936 switch (state) {
2937 case AMDGPU_IRQ_STATE_DISABLE:
2938 dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type]);
2939 dc_hpd_int_cntl &= ~DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
2940 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl);
2941 break;
2942 case AMDGPU_IRQ_STATE_ENABLE:
2943 dc_hpd_int_cntl = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type]);
2944 dc_hpd_int_cntl |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_EN_MASK;
2945 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[type], dc_hpd_int_cntl);
2946 break;
2947 default:
2948 break;
2949 }
2950
2951 return 0;
2952}
2953
2954static int dce_v8_0_set_crtc_interrupt_state(struct amdgpu_device *adev,
2955 struct amdgpu_irq_src *src,
2956 unsigned type,
2957 enum amdgpu_interrupt_state state)
2958{
2959 switch (type) {
2960 case AMDGPU_CRTC_IRQ_VBLANK1:
2961 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 0, state);
2962 break;
2963 case AMDGPU_CRTC_IRQ_VBLANK2:
2964 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 1, state);
2965 break;
2966 case AMDGPU_CRTC_IRQ_VBLANK3:
2967 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 2, state);
2968 break;
2969 case AMDGPU_CRTC_IRQ_VBLANK4:
2970 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 3, state);
2971 break;
2972 case AMDGPU_CRTC_IRQ_VBLANK5:
2973 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 4, state);
2974 break;
2975 case AMDGPU_CRTC_IRQ_VBLANK6:
2976 dce_v8_0_set_crtc_vblank_interrupt_state(adev, 5, state);
2977 break;
2978 case AMDGPU_CRTC_IRQ_VLINE1:
2979 dce_v8_0_set_crtc_vline_interrupt_state(adev, 0, state);
2980 break;
2981 case AMDGPU_CRTC_IRQ_VLINE2:
2982 dce_v8_0_set_crtc_vline_interrupt_state(adev, 1, state);
2983 break;
2984 case AMDGPU_CRTC_IRQ_VLINE3:
2985 dce_v8_0_set_crtc_vline_interrupt_state(adev, 2, state);
2986 break;
2987 case AMDGPU_CRTC_IRQ_VLINE4:
2988 dce_v8_0_set_crtc_vline_interrupt_state(adev, 3, state);
2989 break;
2990 case AMDGPU_CRTC_IRQ_VLINE5:
2991 dce_v8_0_set_crtc_vline_interrupt_state(adev, 4, state);
2992 break;
2993 case AMDGPU_CRTC_IRQ_VLINE6:
2994 dce_v8_0_set_crtc_vline_interrupt_state(adev, 5, state);
2995 break;
2996 default:
2997 break;
2998 }
2999 return 0;
3000}
3001
3002static int dce_v8_0_crtc_irq(struct amdgpu_device *adev,
3003 struct amdgpu_irq_src *source,
3004 struct amdgpu_iv_entry *entry)
3005{
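	/* crtc vblank/vline interrupt sources are registered as src_ids 1..6
	 * in sw_init, so src_id - 1 is the crtc index
	 */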
3006 unsigned crtc = entry->src_id - 1;
3007 uint32_t disp_int = RREG32(interrupt_status_offsets[crtc].reg);
3008 unsigned int irq_type = amdgpu_display_crtc_idx_to_irq_type(adev,
3009 crtc);
3010
3011 switch (entry->src_data[0]) {
	case 0: /* vblank */
3013 if (disp_int & interrupt_status_offsets[crtc].vblank)
3014 WREG32(mmLB_VBLANK_STATUS + crtc_offsets[crtc], LB_VBLANK_STATUS__VBLANK_ACK_MASK);
3015 else
3016 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
3017
3018 if (amdgpu_irq_enabled(adev, source, irq_type)) {
3019 drm_handle_vblank(adev->ddev, crtc);
3020 }
3021 DRM_DEBUG("IH: D%d vblank\n", crtc + 1);
3022 break;
	case 1: /* vline */
3024 if (disp_int & interrupt_status_offsets[crtc].vline)
3025 WREG32(mmLB_VLINE_STATUS + crtc_offsets[crtc], LB_VLINE_STATUS__VLINE_ACK_MASK);
3026 else
3027 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
3028
3029 DRM_DEBUG("IH: D%d vline\n", crtc + 1);
3030 break;
3031 default:
3032 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]);
3033 break;
3034 }
3035
3036 return 0;
3037}
3038
3039static int dce_v8_0_set_pageflip_interrupt_state(struct amdgpu_device *adev,
3040 struct amdgpu_irq_src *src,
3041 unsigned type,
3042 enum amdgpu_interrupt_state state)
3043{
3044 u32 reg;
3045
3046 if (type >= adev->mode_info.num_crtc) {
3047 DRM_ERROR("invalid pageflip crtc %d\n", type);
3048 return -EINVAL;
3049 }
3050
3051 reg = RREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type]);
3052 if (state == AMDGPU_IRQ_STATE_DISABLE)
3053 WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type],
3054 reg & ~GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK);
3055 else
3056 WREG32(mmGRPH_INTERRUPT_CONTROL + crtc_offsets[type],
3057 reg | GRPH_INTERRUPT_CONTROL__GRPH_PFLIP_INT_MASK_MASK);
3058
3059 return 0;
3060}
3061
3062static int dce_v8_0_pageflip_irq(struct amdgpu_device *adev,
3063 struct amdgpu_irq_src *source,
3064 struct amdgpu_iv_entry *entry)
3065{
3066 unsigned long flags;
3067 unsigned crtc_id;
3068 struct amdgpu_crtc *amdgpu_crtc;
3069 struct amdgpu_flip_work *works;
3070
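	/* pageflip interrupt sources are registered as src_ids 8, 10, ... 18,
	 * one per crtc (see sw_init)
	 */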
	crtc_id = (entry->src_id - 8) >> 1;
	if (crtc_id >= adev->mode_info.num_crtc) {
		DRM_ERROR("invalid pageflip crtc %d\n", crtc_id);
		return -EINVAL;
	}
	amdgpu_crtc = adev->mode_info.crtcs[crtc_id];
3078
3079 if (RREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id]) &
3080 GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_OCCURRED_MASK)
3081 WREG32(mmGRPH_INTERRUPT_STATUS + crtc_offsets[crtc_id],
3082 GRPH_INTERRUPT_STATUS__GRPH_PFLIP_INT_CLEAR_MASK);

	/* an IRQ can fire before the crtcs are fully initialized */
3085 if (amdgpu_crtc == NULL)
3086 return 0;
3087
3088 spin_lock_irqsave(&adev->ddev->event_lock, flags);
3089 works = amdgpu_crtc->pflip_works;
	if (amdgpu_crtc->pflip_status != AMDGPU_FLIP_SUBMITTED) {
3091 DRM_DEBUG_DRIVER("amdgpu_crtc->pflip_status = %d != "
3092 "AMDGPU_FLIP_SUBMITTED(%d)\n",
3093 amdgpu_crtc->pflip_status,
3094 AMDGPU_FLIP_SUBMITTED);
3095 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
3096 return 0;
3097 }

	/* page flip completed, clean up */
3100 amdgpu_crtc->pflip_status = AMDGPU_FLIP_NONE;
3101 amdgpu_crtc->pflip_works = NULL;

	/* wake up userspace */
3104 if (works->event)
3105 drm_crtc_send_vblank_event(&amdgpu_crtc->base, works->event);
3106
3107 spin_unlock_irqrestore(&adev->ddev->event_lock, flags);
3108
3109 drm_crtc_vblank_put(&amdgpu_crtc->base);
3110 schedule_work(&works->unpin_work);
3111
3112 return 0;
3113}
3114
3115static int dce_v8_0_hpd_irq(struct amdgpu_device *adev,
3116 struct amdgpu_irq_src *source,
3117 struct amdgpu_iv_entry *entry)
3118{
3119 uint32_t disp_int, mask, tmp;
3120 unsigned hpd;
3121
3122 if (entry->src_data[0] >= adev->mode_info.num_hpd) {
3123 DRM_DEBUG("Unhandled interrupt: %d %d\n", entry->src_id, entry->src_data[0]);
3124 return 0;
3125 }
3126
3127 hpd = entry->src_data[0];
3128 disp_int = RREG32(interrupt_status_offsets[hpd].reg);
3129 mask = interrupt_status_offsets[hpd].hpd;
3130
3131 if (disp_int & mask) {
3132 tmp = RREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd]);
3133 tmp |= DC_HPD1_INT_CONTROL__DC_HPD1_INT_ACK_MASK;
3134 WREG32(mmDC_HPD1_INT_CONTROL + hpd_offsets[hpd], tmp);
3135 schedule_work(&adev->hotplug_work);
3136 DRM_DEBUG("IH: HPD%d\n", hpd + 1);
3137 }
3138
	return 0;
}
3142
3143static int dce_v8_0_set_clockgating_state(void *handle,
3144 enum amd_clockgating_state state)
3145{
3146 return 0;
3147}
3148
3149static int dce_v8_0_set_powergating_state(void *handle,
3150 enum amd_powergating_state state)
3151{
3152 return 0;
3153}
3154
3155static const struct amd_ip_funcs dce_v8_0_ip_funcs = {
3156 .name = "dce_v8_0",
3157 .early_init = dce_v8_0_early_init,
3158 .late_init = NULL,
3159 .sw_init = dce_v8_0_sw_init,
3160 .sw_fini = dce_v8_0_sw_fini,
3161 .hw_init = dce_v8_0_hw_init,
3162 .hw_fini = dce_v8_0_hw_fini,
3163 .suspend = dce_v8_0_suspend,
3164 .resume = dce_v8_0_resume,
3165 .is_idle = dce_v8_0_is_idle,
3166 .wait_for_idle = dce_v8_0_wait_for_idle,
3167 .soft_reset = dce_v8_0_soft_reset,
3168 .set_clockgating_state = dce_v8_0_set_clockgating_state,
3169 .set_powergating_state = dce_v8_0_set_powergating_state,
3170};
3171
3172static void
3173dce_v8_0_encoder_mode_set(struct drm_encoder *encoder,
3174 struct drm_display_mode *mode,
3175 struct drm_display_mode *adjusted_mode)
3176{
3177 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
3178
3179 amdgpu_encoder->pixel_clock = adjusted_mode->clock;

	/* need to call this here rather than in prepare() since we need some crtc info */
3182 amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);

	/* set scaler clears this on some chips */
3185 dce_v8_0_set_interleave(encoder->crtc, mode);
3186
3187 if (amdgpu_atombios_encoder_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI) {
3188 dce_v8_0_afmt_enable(encoder, true);
3189 dce_v8_0_afmt_setmode(encoder, adjusted_mode);
3190 }
3191}
3192
3193static void dce_v8_0_encoder_prepare(struct drm_encoder *encoder)
3194{
3195 struct amdgpu_device *adev = encoder->dev->dev_private;
3196 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
3197 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
3198
3199 if ((amdgpu_encoder->active_device &
3200 (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
3201 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) !=
3202 ENCODER_OBJECT_ID_NONE)) {
3203 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
3204 if (dig) {
3205 dig->dig_encoder = dce_v8_0_pick_dig_encoder(encoder);
3206 if (amdgpu_encoder->active_device & ATOM_DEVICE_DFP_SUPPORT)
3207 dig->afmt = adev->mode_info.afmt[dig->dig_encoder];
3208 }
3209 }
3210
3211 amdgpu_atombios_scratch_regs_lock(adev, true);
3212
3213 if (connector) {
3214 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
3215
		/* select the clock/data port if it exists */
3217 if (amdgpu_connector->router.cd_valid)
3218 amdgpu_i2c_router_select_cd_port(amdgpu_connector);
3219
		/* turn the eDP panel on for the mode set */
3221 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP)
3222 amdgpu_atombios_encoder_set_edp_panel_power(connector,
3223 ATOM_TRANSMITTER_ACTION_POWER_ON);
3224 }

	/* this is needed for the pll/ss setup to work correctly in some cases */
3227 amdgpu_atombios_encoder_set_crtc_source(encoder);
	/* set up the FMT blocks */
3229 dce_v8_0_program_fmt(encoder);
3230}
3231
3232static void dce_v8_0_encoder_commit(struct drm_encoder *encoder)
3233{
3234 struct drm_device *dev = encoder->dev;
3235 struct amdgpu_device *adev = dev->dev_private;

	/* need to call this here as we need the crtc set up */
3238 amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_ON);
3239 amdgpu_atombios_scratch_regs_lock(adev, false);
3240}
3241
3242static void dce_v8_0_encoder_disable(struct drm_encoder *encoder)
3243{
3244 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
3245 struct amdgpu_encoder_atom_dig *dig;
3246
3247 amdgpu_atombios_encoder_dpms(encoder, DRM_MODE_DPMS_OFF);
3248
3249 if (amdgpu_atombios_encoder_is_digital(encoder)) {
3250 if (amdgpu_atombios_encoder_get_encoder_mode(encoder) == ATOM_ENCODER_MODE_HDMI)
3251 dce_v8_0_afmt_enable(encoder, false);
3252 dig = amdgpu_encoder->enc_priv;
3253 dig->dig_encoder = -1;
3254 }
3255 amdgpu_encoder->active_device = 0;
3256}
3257
/* these are handled by the primary encoders */
3259static void dce_v8_0_ext_prepare(struct drm_encoder *encoder)
3260{
3261
3262}
3263
3264static void dce_v8_0_ext_commit(struct drm_encoder *encoder)
3265{
3266
3267}
3268
3269static void
3270dce_v8_0_ext_mode_set(struct drm_encoder *encoder,
3271 struct drm_display_mode *mode,
3272 struct drm_display_mode *adjusted_mode)
3273{
3274
3275}
3276
3277static void dce_v8_0_ext_disable(struct drm_encoder *encoder)
3278{
3279
3280}
3281
3282static void
3283dce_v8_0_ext_dpms(struct drm_encoder *encoder, int mode)
3284{
3285
3286}
3287
3288static const struct drm_encoder_helper_funcs dce_v8_0_ext_helper_funcs = {
3289 .dpms = dce_v8_0_ext_dpms,
3290 .prepare = dce_v8_0_ext_prepare,
3291 .mode_set = dce_v8_0_ext_mode_set,
3292 .commit = dce_v8_0_ext_commit,
3293 .disable = dce_v8_0_ext_disable,
3294
3295};
3296
3297static const struct drm_encoder_helper_funcs dce_v8_0_dig_helper_funcs = {
3298 .dpms = amdgpu_atombios_encoder_dpms,
3299 .mode_fixup = amdgpu_atombios_encoder_mode_fixup,
3300 .prepare = dce_v8_0_encoder_prepare,
3301 .mode_set = dce_v8_0_encoder_mode_set,
3302 .commit = dce_v8_0_encoder_commit,
3303 .disable = dce_v8_0_encoder_disable,
3304 .detect = amdgpu_atombios_encoder_dig_detect,
3305};
3306
3307static const struct drm_encoder_helper_funcs dce_v8_0_dac_helper_funcs = {
3308 .dpms = amdgpu_atombios_encoder_dpms,
3309 .mode_fixup = amdgpu_atombios_encoder_mode_fixup,
3310 .prepare = dce_v8_0_encoder_prepare,
3311 .mode_set = dce_v8_0_encoder_mode_set,
3312 .commit = dce_v8_0_encoder_commit,
3313 .detect = amdgpu_atombios_encoder_dac_detect,
3314};
3315
3316static void dce_v8_0_encoder_destroy(struct drm_encoder *encoder)
3317{
3318 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
3319 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
3320 amdgpu_atombios_encoder_fini_backlight(amdgpu_encoder);
3321 kfree(amdgpu_encoder->enc_priv);
3322 drm_encoder_cleanup(encoder);
3323 kfree(amdgpu_encoder);
3324}
3325
3326static const struct drm_encoder_funcs dce_v8_0_encoder_funcs = {
3327 .destroy = dce_v8_0_encoder_destroy,
3328};
3329
3330static void dce_v8_0_encoder_add(struct amdgpu_device *adev,
3331 uint32_t encoder_enum,
3332 uint32_t supported_device,
3333 u16 caps)
3334{
3335 struct drm_device *dev = adev->ddev;
3336 struct drm_encoder *encoder;
3337 struct amdgpu_encoder *amdgpu_encoder;

	/* see if we already added it */
3340 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
3341 amdgpu_encoder = to_amdgpu_encoder(encoder);
3342 if (amdgpu_encoder->encoder_enum == encoder_enum) {
3343 amdgpu_encoder->devices |= supported_device;
3344 return;
3345 }
3346
3347 }

	/* add a new one */
3350 amdgpu_encoder = kzalloc(sizeof(struct amdgpu_encoder), GFP_KERNEL);
3351 if (!amdgpu_encoder)
3352 return;
3353
3354 encoder = &amdgpu_encoder->base;
3355 switch (adev->mode_info.num_crtc) {
3356 case 1:
3357 encoder->possible_crtcs = 0x1;
3358 break;
3359 case 2:
3360 default:
3361 encoder->possible_crtcs = 0x3;
3362 break;
3363 case 4:
3364 encoder->possible_crtcs = 0xf;
3365 break;
3366 case 6:
3367 encoder->possible_crtcs = 0x3f;
3368 break;
3369 }
3370
3371 amdgpu_encoder->enc_priv = NULL;
3372
3373 amdgpu_encoder->encoder_enum = encoder_enum;
3374 amdgpu_encoder->encoder_id = (encoder_enum & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
3375 amdgpu_encoder->devices = supported_device;
3376 amdgpu_encoder->rmx_type = RMX_OFF;
3377 amdgpu_encoder->underscan_type = UNDERSCAN_OFF;
3378 amdgpu_encoder->is_ext_encoder = false;
3379 amdgpu_encoder->caps = caps;
3380
3381 switch (amdgpu_encoder->encoder_id) {
3382 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
3383 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
3384 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3385 DRM_MODE_ENCODER_DAC, NULL);
3386 drm_encoder_helper_add(encoder, &dce_v8_0_dac_helper_funcs);
3387 break;
3388 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
3389 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
3390 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
3391 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
3392 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
3393 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
3394 amdgpu_encoder->rmx_type = RMX_FULL;
3395 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3396 DRM_MODE_ENCODER_LVDS, NULL);
3397 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_lcd_info(amdgpu_encoder);
3398 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT)) {
3399 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3400 DRM_MODE_ENCODER_DAC, NULL);
3401 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder);
3402 } else {
3403 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3404 DRM_MODE_ENCODER_TMDS, NULL);
3405 amdgpu_encoder->enc_priv = amdgpu_atombios_encoder_get_dig_info(amdgpu_encoder);
3406 }
3407 drm_encoder_helper_add(encoder, &dce_v8_0_dig_helper_funcs);
3408 break;
3409 case ENCODER_OBJECT_ID_SI170B:
3410 case ENCODER_OBJECT_ID_CH7303:
3411 case ENCODER_OBJECT_ID_EXTERNAL_SDVOA:
3412 case ENCODER_OBJECT_ID_EXTERNAL_SDVOB:
3413 case ENCODER_OBJECT_ID_TITFP513:
3414 case ENCODER_OBJECT_ID_VT1623:
3415 case ENCODER_OBJECT_ID_HDMI_SI1930:
3416 case ENCODER_OBJECT_ID_TRAVIS:
3417 case ENCODER_OBJECT_ID_NUTMEG:
		/* these are handled by the primary encoders */
3419 amdgpu_encoder->is_ext_encoder = true;
3420 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
3421 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3422 DRM_MODE_ENCODER_LVDS, NULL);
3423 else if (amdgpu_encoder->devices & (ATOM_DEVICE_CRT_SUPPORT))
3424 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3425 DRM_MODE_ENCODER_DAC, NULL);
3426 else
3427 drm_encoder_init(dev, encoder, &dce_v8_0_encoder_funcs,
3428 DRM_MODE_ENCODER_TMDS, NULL);
3429 drm_encoder_helper_add(encoder, &dce_v8_0_ext_helper_funcs);
3430 break;
3431 }
3432}
3433
3434static const struct amdgpu_display_funcs dce_v8_0_display_funcs = {
3435 .bandwidth_update = &dce_v8_0_bandwidth_update,
3436 .vblank_get_counter = &dce_v8_0_vblank_get_counter,
3437 .backlight_set_level = &amdgpu_atombios_encoder_set_backlight_level,
3438 .backlight_get_level = &amdgpu_atombios_encoder_get_backlight_level,
3439 .hpd_sense = &dce_v8_0_hpd_sense,
3440 .hpd_set_polarity = &dce_v8_0_hpd_set_polarity,
3441 .hpd_get_gpio_reg = &dce_v8_0_hpd_get_gpio_reg,
3442 .page_flip = &dce_v8_0_page_flip,
3443 .page_flip_get_scanoutpos = &dce_v8_0_crtc_get_scanoutpos,
3444 .add_encoder = &dce_v8_0_encoder_add,
3445 .add_connector = &amdgpu_connector_add,
3446};
3447
3448static void dce_v8_0_set_display_funcs(struct amdgpu_device *adev)
3449{
3450 if (adev->mode_info.funcs == NULL)
3451 adev->mode_info.funcs = &dce_v8_0_display_funcs;
3452}
3453
3454static const struct amdgpu_irq_src_funcs dce_v8_0_crtc_irq_funcs = {
3455 .set = dce_v8_0_set_crtc_interrupt_state,
3456 .process = dce_v8_0_crtc_irq,
3457};
3458
3459static const struct amdgpu_irq_src_funcs dce_v8_0_pageflip_irq_funcs = {
3460 .set = dce_v8_0_set_pageflip_interrupt_state,
3461 .process = dce_v8_0_pageflip_irq,
3462};
3463
3464static const struct amdgpu_irq_src_funcs dce_v8_0_hpd_irq_funcs = {
3465 .set = dce_v8_0_set_hpd_interrupt_state,
3466 .process = dce_v8_0_hpd_irq,
3467};
3468
3469static void dce_v8_0_set_irq_funcs(struct amdgpu_device *adev)
3470{
3471 if (adev->mode_info.num_crtc > 0)
3472 adev->crtc_irq.num_types = AMDGPU_CRTC_IRQ_VLINE1 + adev->mode_info.num_crtc;
3473 else
3474 adev->crtc_irq.num_types = 0;
3475 adev->crtc_irq.funcs = &dce_v8_0_crtc_irq_funcs;
3476
3477 adev->pageflip_irq.num_types = adev->mode_info.num_crtc;
3478 adev->pageflip_irq.funcs = &dce_v8_0_pageflip_irq_funcs;
3479
3480 adev->hpd_irq.num_types = adev->mode_info.num_hpd;
3481 adev->hpd_irq.funcs = &dce_v8_0_hpd_irq_funcs;
3482}
3483
3484const struct amdgpu_ip_block_version dce_v8_0_ip_block =
3485{
3486 .type = AMD_IP_BLOCK_TYPE_DCE,
3487 .major = 8,
3488 .minor = 0,
3489 .rev = 0,
3490 .funcs = &dce_v8_0_ip_funcs,
3491};
3492
3493const struct amdgpu_ip_block_version dce_v8_1_ip_block =
3494{
3495 .type = AMD_IP_BLOCK_TYPE_DCE,
3496 .major = 8,
3497 .minor = 1,
3498 .rev = 0,
3499 .funcs = &dce_v8_0_ip_funcs,
3500};
3501
3502const struct amdgpu_ip_block_version dce_v8_2_ip_block =
3503{
3504 .type = AMD_IP_BLOCK_TYPE_DCE,
3505 .major = 8,
3506 .minor = 2,
3507 .rev = 0,
3508 .funcs = &dce_v8_0_ip_funcs,
3509};
3510
3511const struct amdgpu_ip_block_version dce_v8_3_ip_block =
3512{
3513 .type = AMD_IP_BLOCK_TYPE_DCE,
3514 .major = 8,
3515 .minor = 3,
3516 .rev = 0,
3517 .funcs = &dce_v8_0_ip_funcs,
3518};
3519
3520const struct amdgpu_ip_block_version dce_v8_5_ip_block =
3521{
3522 .type = AMD_IP_BLOCK_TYPE_DCE,
3523 .major = 8,
3524 .minor = 5,
3525 .rev = 0,
3526 .funcs = &dce_v8_0_ip_funcs,
3527};
3528