1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26#include <drm/drmP.h>
27#include <drm/drm_crtc_helper.h>
28#include <drm/drm_fb_helper.h>
29#include <drm/radeon_drm.h>
30#include <drm/drm_fixed.h>
31#include "radeon.h"
32#include "atom.h"
33#include "atom-bits.h"
34
35static void atombios_overscan_setup(struct drm_crtc *crtc,
36 struct drm_display_mode *mode,
37 struct drm_display_mode *adjusted_mode)
38{
39 struct drm_device *dev = crtc->dev;
40 struct radeon_device *rdev = dev->dev_private;
41 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
42 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
43 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
44 int a1, a2;
45
46 memset(&args, 0, sizeof(args));
47
48 args.ucCRTC = radeon_crtc->crtc_id;
49
50 switch (radeon_crtc->rmx_type) {
51 case RMX_CENTER:
52 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
54 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
56 break;
57 case RMX_ASPECT:
58 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
59 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
60
61 if (a1 > a2) {
62 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
64 } else if (a2 > a1) {
65 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
67 }
68 break;
69 case RMX_FULL:
70 default:
71 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
73 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
74 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
75 break;
76 }
77 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
78}
79
80static void atombios_scaler_setup(struct drm_crtc *crtc)
81{
82 struct drm_device *dev = crtc->dev;
83 struct radeon_device *rdev = dev->dev_private;
84 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
85 ENABLE_SCALER_PS_ALLOCATION args;
86 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
87 struct radeon_encoder *radeon_encoder =
88 to_radeon_encoder(radeon_crtc->encoder);
89
90 enum radeon_tv_std tv_std = TV_STD_NTSC;
91 bool is_tv = false, is_cv = false;
92
93 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
94 return;
95
96 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
97 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
98 tv_std = tv_dac->tv_std;
99 is_tv = true;
100 }
101
102 memset(&args, 0, sizeof(args));
103
104 args.ucScaler = radeon_crtc->crtc_id;
105
106 if (is_tv) {
107 switch (tv_std) {
108 case TV_STD_NTSC:
109 default:
110 args.ucTVStandard = ATOM_TV_NTSC;
111 break;
112 case TV_STD_PAL:
113 args.ucTVStandard = ATOM_TV_PAL;
114 break;
115 case TV_STD_PAL_M:
116 args.ucTVStandard = ATOM_TV_PALM;
117 break;
118 case TV_STD_PAL_60:
119 args.ucTVStandard = ATOM_TV_PAL60;
120 break;
121 case TV_STD_NTSC_J:
122 args.ucTVStandard = ATOM_TV_NTSCJ;
123 break;
124 case TV_STD_SCART_PAL:
125 args.ucTVStandard = ATOM_TV_PAL;
126 break;
127 case TV_STD_SECAM:
128 args.ucTVStandard = ATOM_TV_SECAM;
129 break;
130 case TV_STD_PAL_CN:
131 args.ucTVStandard = ATOM_TV_PALCN;
132 break;
133 }
134 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
135 } else if (is_cv) {
136 args.ucTVStandard = ATOM_TV_CV;
137 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
138 } else {
139 switch (radeon_crtc->rmx_type) {
140 case RMX_FULL:
141 args.ucEnable = ATOM_SCALER_EXPANSION;
142 break;
143 case RMX_CENTER:
144 args.ucEnable = ATOM_SCALER_CENTER;
145 break;
146 case RMX_ASPECT:
147 args.ucEnable = ATOM_SCALER_EXPANSION;
148 break;
149 default:
150 if (ASIC_IS_AVIVO(rdev))
151 args.ucEnable = ATOM_SCALER_DISABLE;
152 else
153 args.ucEnable = ATOM_SCALER_CENTER;
154 break;
155 }
156 }
157 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
158 if ((is_tv || is_cv)
159 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
160 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
161 }
162}
163
164static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
165{
166 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
167 struct drm_device *dev = crtc->dev;
168 struct radeon_device *rdev = dev->dev_private;
169 int index =
170 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
171 ENABLE_CRTC_PS_ALLOCATION args;
172
173 memset(&args, 0, sizeof(args));
174
175 args.ucCRTC = radeon_crtc->crtc_id;
176 args.ucEnable = lock;
177
178 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
179}
180
181static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
182{
183 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
184 struct drm_device *dev = crtc->dev;
185 struct radeon_device *rdev = dev->dev_private;
186 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
187 ENABLE_CRTC_PS_ALLOCATION args;
188
189 memset(&args, 0, sizeof(args));
190
191 args.ucCRTC = radeon_crtc->crtc_id;
192 args.ucEnable = state;
193
194 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
195}
196
197static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
198{
199 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
200 struct drm_device *dev = crtc->dev;
201 struct radeon_device *rdev = dev->dev_private;
202 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
203 ENABLE_CRTC_PS_ALLOCATION args;
204
205 memset(&args, 0, sizeof(args));
206
207 args.ucCRTC = radeon_crtc->crtc_id;
208 args.ucEnable = state;
209
210 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
211}
212
213static const u32 vga_control_regs[6] =
214{
215 AVIVO_D1VGA_CONTROL,
216 AVIVO_D2VGA_CONTROL,
217 EVERGREEN_D3VGA_CONTROL,
218 EVERGREEN_D4VGA_CONTROL,
219 EVERGREEN_D5VGA_CONTROL,
220 EVERGREEN_D6VGA_CONTROL,
221};
222
223static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
224{
225 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
226 struct drm_device *dev = crtc->dev;
227 struct radeon_device *rdev = dev->dev_private;
228 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
229 BLANK_CRTC_PS_ALLOCATION args;
230 u32 vga_control = 0;
231
232 memset(&args, 0, sizeof(args));
233
234 if (ASIC_IS_DCE8(rdev)) {
235 vga_control = RREG32(vga_control_regs[radeon_crtc->crtc_id]);
236 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control | 1);
237 }
238
239 args.ucCRTC = radeon_crtc->crtc_id;
240 args.ucBlanking = state;
241
242 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
243
244 if (ASIC_IS_DCE8(rdev)) {
245 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control);
246 }
247}
248
249static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
250{
251 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
252 struct drm_device *dev = crtc->dev;
253 struct radeon_device *rdev = dev->dev_private;
254 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
255 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
256
257 memset(&args, 0, sizeof(args));
258
259 args.ucDispPipeId = radeon_crtc->crtc_id;
260 args.ucEnable = state;
261
262 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
263}
264
265void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
266{
267 struct drm_device *dev = crtc->dev;
268 struct radeon_device *rdev = dev->dev_private;
269 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
270
271 switch (mode) {
272 case DRM_MODE_DPMS_ON:
273 radeon_crtc->enabled = true;
274 atombios_enable_crtc(crtc, ATOM_ENABLE);
275 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
276 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
277 atombios_blank_crtc(crtc, ATOM_DISABLE);
278 if (dev->num_crtcs > radeon_crtc->crtc_id)
279 drm_crtc_vblank_on(crtc);
280 radeon_crtc_load_lut(crtc);
281 break;
282 case DRM_MODE_DPMS_STANDBY:
283 case DRM_MODE_DPMS_SUSPEND:
284 case DRM_MODE_DPMS_OFF:
285 if (dev->num_crtcs > radeon_crtc->crtc_id)
286 drm_crtc_vblank_off(crtc);
287 if (radeon_crtc->enabled)
288 atombios_blank_crtc(crtc, ATOM_ENABLE);
289 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
290 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
291 atombios_enable_crtc(crtc, ATOM_DISABLE);
292 radeon_crtc->enabled = false;
293 break;
294 }
295
296 radeon_pm_compute_clocks(rdev);
297}
298
299static void
300atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
301 struct drm_display_mode *mode)
302{
303 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
304 struct drm_device *dev = crtc->dev;
305 struct radeon_device *rdev = dev->dev_private;
306 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
307 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
308 u16 misc = 0;
309
310 memset(&args, 0, sizeof(args));
311 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
312 args.usH_Blanking_Time =
313 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
314 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
315 args.usV_Blanking_Time =
316 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
317 args.usH_SyncOffset =
318 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
319 args.usH_SyncWidth =
320 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
321 args.usV_SyncOffset =
322 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
323 args.usV_SyncWidth =
324 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
325 args.ucH_Border = radeon_crtc->h_border;
326 args.ucV_Border = radeon_crtc->v_border;
327
328 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
329 misc |= ATOM_VSYNC_POLARITY;
330 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
331 misc |= ATOM_HSYNC_POLARITY;
332 if (mode->flags & DRM_MODE_FLAG_CSYNC)
333 misc |= ATOM_COMPOSITESYNC;
334 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
335 misc |= ATOM_INTERLACE;
336 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
337 misc |= ATOM_DOUBLE_CLOCK_MODE;
338 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
339 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
340
341 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
342 args.ucCRTC = radeon_crtc->crtc_id;
343
344 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
345}
346
347static void atombios_crtc_set_timing(struct drm_crtc *crtc,
348 struct drm_display_mode *mode)
349{
350 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
351 struct drm_device *dev = crtc->dev;
352 struct radeon_device *rdev = dev->dev_private;
353 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
354 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
355 u16 misc = 0;
356
357 memset(&args, 0, sizeof(args));
358 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
359 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
360 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
361 args.usH_SyncWidth =
362 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
363 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
364 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
365 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
366 args.usV_SyncWidth =
367 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
368
369 args.ucOverscanRight = radeon_crtc->h_border;
370 args.ucOverscanLeft = radeon_crtc->h_border;
371 args.ucOverscanBottom = radeon_crtc->v_border;
372 args.ucOverscanTop = radeon_crtc->v_border;
373
374 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
375 misc |= ATOM_VSYNC_POLARITY;
376 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
377 misc |= ATOM_HSYNC_POLARITY;
378 if (mode->flags & DRM_MODE_FLAG_CSYNC)
379 misc |= ATOM_COMPOSITESYNC;
380 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
381 misc |= ATOM_INTERLACE;
382 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
383 misc |= ATOM_DOUBLE_CLOCK_MODE;
384 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
385 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
386
387 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
388 args.ucCRTC = radeon_crtc->crtc_id;
389
390 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
391}
392
393static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
394{
395 u32 ss_cntl;
396
397 if (ASIC_IS_DCE4(rdev)) {
398 switch (pll_id) {
399 case ATOM_PPLL1:
400 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
401 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
402 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
403 break;
404 case ATOM_PPLL2:
405 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
406 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
407 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
408 break;
409 case ATOM_DCPLL:
410 case ATOM_PPLL_INVALID:
411 return;
412 }
413 } else if (ASIC_IS_AVIVO(rdev)) {
414 switch (pll_id) {
415 case ATOM_PPLL1:
416 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
417 ss_cntl &= ~1;
418 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
419 break;
420 case ATOM_PPLL2:
421 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
422 ss_cntl &= ~1;
423 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
424 break;
425 case ATOM_DCPLL:
426 case ATOM_PPLL_INVALID:
427 return;
428 }
429 }
430}
431
432
433union atom_enable_ss {
434 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
435 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
436 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
437 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
438 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
439};
440
441static void atombios_crtc_program_ss(struct radeon_device *rdev,
442 int enable,
443 int pll_id,
444 int crtc_id,
445 struct radeon_atom_ss *ss)
446{
447 unsigned i;
448 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
449 union atom_enable_ss args;
450
451 if (enable) {
452
453
454
455
456
457 if (ss->percentage == 0)
458 return;
459 if (ss->type & ATOM_EXTERNAL_SS_MASK)
460 return;
461 } else {
462 for (i = 0; i < rdev->num_crtc; i++) {
463 if (rdev->mode_info.crtcs[i] &&
464 rdev->mode_info.crtcs[i]->enabled &&
465 i != crtc_id &&
466 pll_id == rdev->mode_info.crtcs[i]->pll_id) {
467
468
469
470
471 return;
472 }
473 }
474 }
475
476 memset(&args, 0, sizeof(args));
477
478 if (ASIC_IS_DCE5(rdev)) {
479 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
480 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
481 switch (pll_id) {
482 case ATOM_PPLL1:
483 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
484 break;
485 case ATOM_PPLL2:
486 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
487 break;
488 case ATOM_DCPLL:
489 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
490 break;
491 case ATOM_PPLL_INVALID:
492 return;
493 }
494 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
495 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
496 args.v3.ucEnable = enable;
497 } else if (ASIC_IS_DCE4(rdev)) {
498 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
499 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
500 switch (pll_id) {
501 case ATOM_PPLL1:
502 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
503 break;
504 case ATOM_PPLL2:
505 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
506 break;
507 case ATOM_DCPLL:
508 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
509 break;
510 case ATOM_PPLL_INVALID:
511 return;
512 }
513 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
514 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
515 args.v2.ucEnable = enable;
516 } else if (ASIC_IS_DCE3(rdev)) {
517 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
518 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
519 args.v1.ucSpreadSpectrumStep = ss->step;
520 args.v1.ucSpreadSpectrumDelay = ss->delay;
521 args.v1.ucSpreadSpectrumRange = ss->range;
522 args.v1.ucPpll = pll_id;
523 args.v1.ucEnable = enable;
524 } else if (ASIC_IS_AVIVO(rdev)) {
525 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
526 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
527 atombios_disable_ss(rdev, pll_id);
528 return;
529 }
530 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
531 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
532 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
533 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
534 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
535 args.lvds_ss_2.ucEnable = enable;
536 } else {
537 if (enable == ATOM_DISABLE) {
538 atombios_disable_ss(rdev, pll_id);
539 return;
540 }
541 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
542 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
543 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
544 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
545 args.lvds_ss.ucEnable = enable;
546 }
547 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
548}
549
550union adjust_pixel_clock {
551 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
552 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
553};
554
555static u32 atombios_adjust_pll(struct drm_crtc *crtc,
556 struct drm_display_mode *mode)
557{
558 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
559 struct drm_device *dev = crtc->dev;
560 struct radeon_device *rdev = dev->dev_private;
561 struct drm_encoder *encoder = radeon_crtc->encoder;
562 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
563 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
564 u32 adjusted_clock = mode->clock;
565 int encoder_mode = atombios_get_encoder_mode(encoder);
566 u32 dp_clock = mode->clock;
567 u32 clock = mode->clock;
568 int bpc = radeon_crtc->bpc;
569 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
570
571
572 radeon_crtc->pll_flags = 0;
573
574 if (ASIC_IS_AVIVO(rdev)) {
575 if ((rdev->family == CHIP_RS600) ||
576 (rdev->family == CHIP_RS690) ||
577 (rdev->family == CHIP_RS740))
578 radeon_crtc->pll_flags |= (
579 RADEON_PLL_PREFER_CLOSEST_LOWER);
580
581 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)
582 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
583 else
584 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
585
586 if (rdev->family < CHIP_RV770)
587 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
588
589 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
590 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
591
592 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
593 && !radeon_crtc->ss_enabled)
594 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
595 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000)
596 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
597 } else {
598 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
599
600 if (mode->clock > 200000)
601 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
602 else
603 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
604 }
605
606 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
607 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
608 if (connector) {
609 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
610 struct radeon_connector_atom_dig *dig_connector =
611 radeon_connector->con_priv;
612
613 dp_clock = dig_connector->dp_clock;
614 }
615 }
616
617 if (radeon_encoder->is_mst_encoder) {
618 struct radeon_encoder_mst *mst_enc = radeon_encoder->enc_priv;
619 struct radeon_connector_atom_dig *dig_connector = mst_enc->connector->con_priv;
620
621 dp_clock = dig_connector->dp_clock;
622 }
623
624
625 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
626 if (radeon_crtc->ss_enabled) {
627 if (radeon_crtc->ss.refdiv) {
628 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
629 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
630 if (ASIC_IS_AVIVO(rdev) &&
631 rdev->family != CHIP_RS780 &&
632 rdev->family != CHIP_RS880)
633 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
634 }
635 }
636 }
637
638 if (ASIC_IS_AVIVO(rdev)) {
639
640 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
641 adjusted_clock = mode->clock * 2;
642 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
643 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
644 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
645 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
646 } else {
647 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
648 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
649 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
650 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
651 }
652
653
654 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
655 switch (bpc) {
656 case 8:
657 default:
658 break;
659 case 10:
660 clock = (clock * 5) / 4;
661 break;
662 case 12:
663 clock = (clock * 3) / 2;
664 break;
665 case 16:
666 clock = clock * 2;
667 break;
668 }
669 }
670
671
672
673
674
675 if (ASIC_IS_DCE3(rdev)) {
676 union adjust_pixel_clock args;
677 u8 frev, crev;
678 int index;
679
680 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
681 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
682 &crev))
683 return adjusted_clock;
684
685 memset(&args, 0, sizeof(args));
686
687 switch (frev) {
688 case 1:
689 switch (crev) {
690 case 1:
691 case 2:
692 args.v1.usPixelClock = cpu_to_le16(clock / 10);
693 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
694 args.v1.ucEncodeMode = encoder_mode;
695 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
696 args.v1.ucConfig |=
697 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
698
699 atom_execute_table(rdev->mode_info.atom_context,
700 index, (uint32_t *)&args);
701 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
702 break;
703 case 3:
704 args.v3.sInput.usPixelClock = cpu_to_le16(clock / 10);
705 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
706 args.v3.sInput.ucEncodeMode = encoder_mode;
707 args.v3.sInput.ucDispPllConfig = 0;
708 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
709 args.v3.sInput.ucDispPllConfig |=
710 DISPPLL_CONFIG_SS_ENABLE;
711 if (ENCODER_MODE_IS_DP(encoder_mode)) {
712 args.v3.sInput.ucDispPllConfig |=
713 DISPPLL_CONFIG_COHERENT_MODE;
714
715 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
716 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
717 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
718 if (dig->coherent_mode)
719 args.v3.sInput.ucDispPllConfig |=
720 DISPPLL_CONFIG_COHERENT_MODE;
721 if (is_duallink)
722 args.v3.sInput.ucDispPllConfig |=
723 DISPPLL_CONFIG_DUAL_LINK;
724 }
725 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
726 ENCODER_OBJECT_ID_NONE)
727 args.v3.sInput.ucExtTransmitterID =
728 radeon_encoder_get_dp_bridge_encoder_id(encoder);
729 else
730 args.v3.sInput.ucExtTransmitterID = 0;
731
732 atom_execute_table(rdev->mode_info.atom_context,
733 index, (uint32_t *)&args);
734 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
735 if (args.v3.sOutput.ucRefDiv) {
736 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
737 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
738 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
739 }
740 if (args.v3.sOutput.ucPostDiv) {
741 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
742 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
743 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
744 }
745 break;
746 default:
747 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
748 return adjusted_clock;
749 }
750 break;
751 default:
752 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
753 return adjusted_clock;
754 }
755 }
756 return adjusted_clock;
757}
758
759union set_pixel_clock {
760 SET_PIXEL_CLOCK_PS_ALLOCATION base;
761 PIXEL_CLOCK_PARAMETERS v1;
762 PIXEL_CLOCK_PARAMETERS_V2 v2;
763 PIXEL_CLOCK_PARAMETERS_V3 v3;
764 PIXEL_CLOCK_PARAMETERS_V5 v5;
765 PIXEL_CLOCK_PARAMETERS_V6 v6;
766};
767
768
769
770
771static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
772 u32 dispclk)
773{
774 u8 frev, crev;
775 int index;
776 union set_pixel_clock args;
777
778 memset(&args, 0, sizeof(args));
779
780 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
781 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
782 &crev))
783 return;
784
785 switch (frev) {
786 case 1:
787 switch (crev) {
788 case 5:
789
790
791
792 args.v5.ucCRTC = ATOM_CRTC_INVALID;
793 args.v5.usPixelClock = cpu_to_le16(dispclk);
794 args.v5.ucPpll = ATOM_DCPLL;
795 break;
796 case 6:
797
798
799
800 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
801 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
802 args.v6.ucPpll = ATOM_EXT_PLL1;
803 else if (ASIC_IS_DCE6(rdev))
804 args.v6.ucPpll = ATOM_PPLL0;
805 else
806 args.v6.ucPpll = ATOM_DCPLL;
807 break;
808 default:
809 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
810 return;
811 }
812 break;
813 default:
814 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
815 return;
816 }
817 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
818}
819
820static void atombios_crtc_program_pll(struct drm_crtc *crtc,
821 u32 crtc_id,
822 int pll_id,
823 u32 encoder_mode,
824 u32 encoder_id,
825 u32 clock,
826 u32 ref_div,
827 u32 fb_div,
828 u32 frac_fb_div,
829 u32 post_div,
830 int bpc,
831 bool ss_enabled,
832 struct radeon_atom_ss *ss)
833{
834 struct drm_device *dev = crtc->dev;
835 struct radeon_device *rdev = dev->dev_private;
836 u8 frev, crev;
837 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
838 union set_pixel_clock args;
839
840 memset(&args, 0, sizeof(args));
841
842 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
843 &crev))
844 return;
845
846 switch (frev) {
847 case 1:
848 switch (crev) {
849 case 1:
850 if (clock == ATOM_DISABLE)
851 return;
852 args.v1.usPixelClock = cpu_to_le16(clock / 10);
853 args.v1.usRefDiv = cpu_to_le16(ref_div);
854 args.v1.usFbDiv = cpu_to_le16(fb_div);
855 args.v1.ucFracFbDiv = frac_fb_div;
856 args.v1.ucPostDiv = post_div;
857 args.v1.ucPpll = pll_id;
858 args.v1.ucCRTC = crtc_id;
859 args.v1.ucRefDivSrc = 1;
860 break;
861 case 2:
862 args.v2.usPixelClock = cpu_to_le16(clock / 10);
863 args.v2.usRefDiv = cpu_to_le16(ref_div);
864 args.v2.usFbDiv = cpu_to_le16(fb_div);
865 args.v2.ucFracFbDiv = frac_fb_div;
866 args.v2.ucPostDiv = post_div;
867 args.v2.ucPpll = pll_id;
868 args.v2.ucCRTC = crtc_id;
869 args.v2.ucRefDivSrc = 1;
870 break;
871 case 3:
872 args.v3.usPixelClock = cpu_to_le16(clock / 10);
873 args.v3.usRefDiv = cpu_to_le16(ref_div);
874 args.v3.usFbDiv = cpu_to_le16(fb_div);
875 args.v3.ucFracFbDiv = frac_fb_div;
876 args.v3.ucPostDiv = post_div;
877 args.v3.ucPpll = pll_id;
878 if (crtc_id == ATOM_CRTC2)
879 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
880 else
881 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
882 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
883 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
884 args.v3.ucTransmitterId = encoder_id;
885 args.v3.ucEncoderMode = encoder_mode;
886 break;
887 case 5:
888 args.v5.ucCRTC = crtc_id;
889 args.v5.usPixelClock = cpu_to_le16(clock / 10);
890 args.v5.ucRefDiv = ref_div;
891 args.v5.usFbDiv = cpu_to_le16(fb_div);
892 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
893 args.v5.ucPostDiv = post_div;
894 args.v5.ucMiscInfo = 0;
895 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
896 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
897 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
898 switch (bpc) {
899 case 8:
900 default:
901 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
902 break;
903 case 10:
904
905 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_32BPP;
906 break;
907 case 12:
908
909 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
910 break;
911 }
912 }
913 args.v5.ucTransmitterID = encoder_id;
914 args.v5.ucEncoderMode = encoder_mode;
915 args.v5.ucPpll = pll_id;
916 break;
917 case 6:
918 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
919 args.v6.ucRefDiv = ref_div;
920 args.v6.usFbDiv = cpu_to_le16(fb_div);
921 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
922 args.v6.ucPostDiv = post_div;
923 args.v6.ucMiscInfo = 0;
924 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
925 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
926 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
927 switch (bpc) {
928 case 8:
929 default:
930 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
931 break;
932 case 10:
933 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP_V6;
934 break;
935 case 12:
936 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP_V6;
937 break;
938 case 16:
939 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
940 break;
941 }
942 }
943 args.v6.ucTransmitterID = encoder_id;
944 args.v6.ucEncoderMode = encoder_mode;
945 args.v6.ucPpll = pll_id;
946 break;
947 default:
948 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
949 return;
950 }
951 break;
952 default:
953 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
954 return;
955 }
956
957 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
958}
959
960static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
961{
962 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
963 struct drm_device *dev = crtc->dev;
964 struct radeon_device *rdev = dev->dev_private;
965 struct radeon_encoder *radeon_encoder =
966 to_radeon_encoder(radeon_crtc->encoder);
967 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
968
969 radeon_crtc->bpc = 8;
970 radeon_crtc->ss_enabled = false;
971
972 if (radeon_encoder->is_mst_encoder) {
973 radeon_dp_mst_prepare_pll(crtc, mode);
974 } else if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
975 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
976 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
977 struct drm_connector *connector =
978 radeon_get_connector_for_encoder(radeon_crtc->encoder);
979 struct radeon_connector *radeon_connector =
980 to_radeon_connector(connector);
981 struct radeon_connector_atom_dig *dig_connector =
982 radeon_connector->con_priv;
983 int dp_clock;
984
985
986 radeon_connector->pixelclock_for_modeset = mode->clock;
987 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
988
989 switch (encoder_mode) {
990 case ATOM_ENCODER_MODE_DP_MST:
991 case ATOM_ENCODER_MODE_DP:
992
993 dp_clock = dig_connector->dp_clock / 10;
994 if (ASIC_IS_DCE4(rdev))
995 radeon_crtc->ss_enabled =
996 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
997 ASIC_INTERNAL_SS_ON_DP,
998 dp_clock);
999 else {
1000 if (dp_clock == 16200) {
1001 radeon_crtc->ss_enabled =
1002 radeon_atombios_get_ppll_ss_info(rdev,
1003 &radeon_crtc->ss,
1004 ATOM_DP_SS_ID2);
1005 if (!radeon_crtc->ss_enabled)
1006 radeon_crtc->ss_enabled =
1007 radeon_atombios_get_ppll_ss_info(rdev,
1008 &radeon_crtc->ss,
1009 ATOM_DP_SS_ID1);
1010 } else {
1011 radeon_crtc->ss_enabled =
1012 radeon_atombios_get_ppll_ss_info(rdev,
1013 &radeon_crtc->ss,
1014 ATOM_DP_SS_ID1);
1015 }
1016
1017 radeon_crtc->ss_enabled = false;
1018 }
1019 break;
1020 case ATOM_ENCODER_MODE_LVDS:
1021 if (ASIC_IS_DCE4(rdev))
1022 radeon_crtc->ss_enabled =
1023 radeon_atombios_get_asic_ss_info(rdev,
1024 &radeon_crtc->ss,
1025 dig->lcd_ss_id,
1026 mode->clock / 10);
1027 else
1028 radeon_crtc->ss_enabled =
1029 radeon_atombios_get_ppll_ss_info(rdev,
1030 &radeon_crtc->ss,
1031 dig->lcd_ss_id);
1032 break;
1033 case ATOM_ENCODER_MODE_DVI:
1034 if (ASIC_IS_DCE4(rdev))
1035 radeon_crtc->ss_enabled =
1036 radeon_atombios_get_asic_ss_info(rdev,
1037 &radeon_crtc->ss,
1038 ASIC_INTERNAL_SS_ON_TMDS,
1039 mode->clock / 10);
1040 break;
1041 case ATOM_ENCODER_MODE_HDMI:
1042 if (ASIC_IS_DCE4(rdev))
1043 radeon_crtc->ss_enabled =
1044 radeon_atombios_get_asic_ss_info(rdev,
1045 &radeon_crtc->ss,
1046 ASIC_INTERNAL_SS_ON_HDMI,
1047 mode->clock / 10);
1048 break;
1049 default:
1050 break;
1051 }
1052 }
1053
1054
1055 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
1056
1057 return true;
1058}
1059
1060static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
1061{
1062 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1063 struct drm_device *dev = crtc->dev;
1064 struct radeon_device *rdev = dev->dev_private;
1065 struct radeon_encoder *radeon_encoder =
1066 to_radeon_encoder(radeon_crtc->encoder);
1067 u32 pll_clock = mode->clock;
1068 u32 clock = mode->clock;
1069 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
1070 struct radeon_pll *pll;
1071 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
1072
1073
1074 if (ASIC_IS_DCE5(rdev) &&
1075 (encoder_mode == ATOM_ENCODER_MODE_HDMI) &&
1076 (radeon_crtc->bpc > 8))
1077 clock = radeon_crtc->adjusted_clock;
1078
1079 switch (radeon_crtc->pll_id) {
1080 case ATOM_PPLL1:
1081 pll = &rdev->clock.p1pll;
1082 break;
1083 case ATOM_PPLL2:
1084 pll = &rdev->clock.p2pll;
1085 break;
1086 case ATOM_DCPLL:
1087 case ATOM_PPLL_INVALID:
1088 default:
1089 pll = &rdev->clock.dcpll;
1090 break;
1091 }
1092
1093
1094 pll->flags = radeon_crtc->pll_flags;
1095 pll->reference_div = radeon_crtc->pll_reference_div;
1096 pll->post_div = radeon_crtc->pll_post_div;
1097
1098 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1099
1100 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1101 &fb_div, &frac_fb_div, &ref_div, &post_div);
1102 else if (ASIC_IS_AVIVO(rdev))
1103 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1104 &fb_div, &frac_fb_div, &ref_div, &post_div);
1105 else
1106 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1107 &fb_div, &frac_fb_div, &ref_div, &post_div);
1108
1109 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1110 radeon_crtc->crtc_id, &radeon_crtc->ss);
1111
1112 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1113 encoder_mode, radeon_encoder->encoder_id, clock,
1114 ref_div, fb_div, frac_fb_div, post_div,
1115 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1116
1117 if (radeon_crtc->ss_enabled) {
1118
1119 if (ASIC_IS_DCE4(rdev)) {
1120 u32 step_size;
1121 u32 amount = (((fb_div * 10) + frac_fb_div) *
1122 (u32)radeon_crtc->ss.percentage) /
1123 (100 * (u32)radeon_crtc->ss.percentage_divider);
1124 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1125 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1126 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1127 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1128 step_size = (4 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1129 (125 * 25 * pll->reference_freq / 100);
1130 else
1131 step_size = (2 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1132 (125 * 25 * pll->reference_freq / 100);
1133 radeon_crtc->ss.step = step_size;
1134 }
1135
1136 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1137 radeon_crtc->crtc_id, &radeon_crtc->ss);
1138 }
1139}
1140
1141static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1142 struct drm_framebuffer *fb,
1143 int x, int y, int atomic)
1144{
1145 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1146 struct drm_device *dev = crtc->dev;
1147 struct radeon_device *rdev = dev->dev_private;
1148 struct radeon_framebuffer *radeon_fb;
1149 struct drm_framebuffer *target_fb;
1150 struct drm_gem_object *obj;
1151 struct radeon_bo *rbo;
1152 uint64_t fb_location;
1153 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1154 unsigned bankw, bankh, mtaspect, tile_split;
1155 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1156 u32 tmp, viewport_w, viewport_h;
1157 int r;
1158 bool bypass_lut = false;
1159 struct drm_format_name_buf format_name;
1160
1161
1162 if (!atomic && !crtc->primary->fb) {
1163 DRM_DEBUG_KMS("No FB bound\n");
1164 return 0;
1165 }
1166
1167 if (atomic) {
1168 radeon_fb = to_radeon_framebuffer(fb);
1169 target_fb = fb;
1170 }
1171 else {
1172 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1173 target_fb = crtc->primary->fb;
1174 }
1175
1176
1177
1178
1179 obj = radeon_fb->obj;
1180 rbo = gem_to_radeon_bo(obj);
1181 r = radeon_bo_reserve(rbo, false);
1182 if (unlikely(r != 0))
1183 return r;
1184
1185 if (atomic)
1186 fb_location = radeon_bo_gpu_offset(rbo);
1187 else {
1188 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1189 if (unlikely(r != 0)) {
1190 radeon_bo_unreserve(rbo);
1191 return -EINVAL;
1192 }
1193 }
1194
1195 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1196 radeon_bo_unreserve(rbo);
1197
1198 switch (target_fb->pixel_format) {
1199 case DRM_FORMAT_C8:
1200 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1201 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1202 break;
1203 case DRM_FORMAT_XRGB4444:
1204 case DRM_FORMAT_ARGB4444:
1205 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1206 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
1207#ifdef __BIG_ENDIAN
1208 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1209#endif
1210 break;
1211 case DRM_FORMAT_XRGB1555:
1212 case DRM_FORMAT_ARGB1555:
1213 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1214 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1215#ifdef __BIG_ENDIAN
1216 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1217#endif
1218 break;
1219 case DRM_FORMAT_BGRX5551:
1220 case DRM_FORMAT_BGRA5551:
1221 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1222 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
1223#ifdef __BIG_ENDIAN
1224 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1225#endif
1226 break;
1227 case DRM_FORMAT_RGB565:
1228 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1229 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1230#ifdef __BIG_ENDIAN
1231 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1232#endif
1233 break;
1234 case DRM_FORMAT_XRGB8888:
1235 case DRM_FORMAT_ARGB8888:
1236 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1237 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1238#ifdef __BIG_ENDIAN
1239 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1240#endif
1241 break;
1242 case DRM_FORMAT_XRGB2101010:
1243 case DRM_FORMAT_ARGB2101010:
1244 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1245 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
1246#ifdef __BIG_ENDIAN
1247 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1248#endif
1249
1250 bypass_lut = true;
1251 break;
1252 case DRM_FORMAT_BGRX1010102:
1253 case DRM_FORMAT_BGRA1010102:
1254 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1255 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
1256#ifdef __BIG_ENDIAN
1257 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1258#endif
1259
1260 bypass_lut = true;
1261 break;
1262 default:
1263 DRM_ERROR("Unsupported screen format %s\n",
1264 drm_get_format_name(target_fb->pixel_format, &format_name));
1265 return -EINVAL;
1266 }
1267
1268 if (tiling_flags & RADEON_TILING_MACRO) {
1269 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1270
1271
1272 if (rdev->family >= CHIP_TAHITI) {
1273 unsigned index, num_banks;
1274
1275 if (rdev->family >= CHIP_BONAIRE) {
1276 unsigned tileb, tile_split_bytes;
1277
1278
1279 tile_split_bytes = 64 << tile_split;
1280 tileb = 8 * 8 * target_fb->bits_per_pixel / 8;
1281 tileb = min(tile_split_bytes, tileb);
1282
1283 for (index = 0; tileb > 64; index++)
1284 tileb >>= 1;
1285
1286 if (index >= 16) {
1287 DRM_ERROR("Wrong screen bpp (%u) or tile split (%u)\n",
1288 target_fb->bits_per_pixel, tile_split);
1289 return -EINVAL;
1290 }
1291
1292 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3;
1293 } else {
1294 switch (target_fb->bits_per_pixel) {
1295 case 8:
1296 index = 10;
1297 break;
1298 case 16:
1299 index = SI_TILE_MODE_COLOR_2D_SCANOUT_16BPP;
1300 break;
1301 default:
1302 case 32:
1303 index = SI_TILE_MODE_COLOR_2D_SCANOUT_32BPP;
1304 break;
1305 }
1306
1307 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3;
1308 }
1309
1310 fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
1311 } else {
1312
1313 if (rdev->family >= CHIP_CAYMAN)
1314 tmp = rdev->config.cayman.tile_config;
1315 else
1316 tmp = rdev->config.evergreen.tile_config;
1317
1318 switch ((tmp & 0xf0) >> 4) {
1319 case 0:
1320 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1321 break;
1322 case 1:
1323 default:
1324 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1325 break;
1326 case 2:
1327 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1328 break;
1329 }
1330 }
1331
1332 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1333 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1334 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1335 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1336 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1337 if (rdev->family >= CHIP_BONAIRE) {
1338
1339 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING);
1340 }
1341 } else if (tiling_flags & RADEON_TILING_MICRO)
1342 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1343
1344 if (rdev->family >= CHIP_BONAIRE) {
1345
1346
1347
1348 u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f;
1349
1350 fb_format |= CIK_GRPH_PIPE_CONFIG(pipe_config);
1351 } else if ((rdev->family == CHIP_TAHITI) ||
1352 (rdev->family == CHIP_PITCAIRN))
1353 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1354 else if ((rdev->family == CHIP_VERDE) ||
1355 (rdev->family == CHIP_OLAND) ||
1356 (rdev->family == CHIP_HAINAN))
1357 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1358
1359 switch (radeon_crtc->crtc_id) {
1360 case 0:
1361 WREG32(AVIVO_D1VGA_CONTROL, 0);
1362 break;
1363 case 1:
1364 WREG32(AVIVO_D2VGA_CONTROL, 0);
1365 break;
1366 case 2:
1367 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1368 break;
1369 case 3:
1370 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1371 break;
1372 case 4:
1373 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1374 break;
1375 case 5:
1376 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1377 break;
1378 default:
1379 break;
1380 }
1381
1382
1383
1384
1385 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1386
1387 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1388 upper_32_bits(fb_location));
1389 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1390 upper_32_bits(fb_location));
1391 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1392 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1393 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1394 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1395 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1396 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1397
1398
1399
1400
1401
1402
1403 WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + radeon_crtc->crtc_offset,
1404 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
1405 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
1406
1407 if (bypass_lut)
1408 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1409
1410 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1411 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1412 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1413 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1414 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1415 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1416
1417 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1418 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1419 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1420
1421 if (rdev->family >= CHIP_BONAIRE)
1422 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1423 target_fb->height);
1424 else
1425 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1426 target_fb->height);
1427 x &= ~3;
1428 y &= ~1;
1429 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1430 (x << 16) | y);
1431 viewport_w = crtc->mode.hdisplay;
1432 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1433 if ((rdev->family >= CHIP_BONAIRE) &&
1434 (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE))
1435 viewport_h *= 2;
1436 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1437 (viewport_w << 16) | viewport_h);
1438
1439
1440 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1441
1442 if (!atomic && fb && fb != crtc->primary->fb) {
1443 radeon_fb = to_radeon_framebuffer(fb);
1444 rbo = gem_to_radeon_bo(radeon_fb->obj);
1445 r = radeon_bo_reserve(rbo, false);
1446 if (unlikely(r != 0))
1447 return r;
1448 radeon_bo_unpin(rbo);
1449 radeon_bo_unreserve(rbo);
1450 }
1451
1452
1453 radeon_bandwidth_update(rdev);
1454
1455 return 0;
1456}
1457
1458static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1459 struct drm_framebuffer *fb,
1460 int x, int y, int atomic)
1461{
1462 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1463 struct drm_device *dev = crtc->dev;
1464 struct radeon_device *rdev = dev->dev_private;
1465 struct radeon_framebuffer *radeon_fb;
1466 struct drm_gem_object *obj;
1467 struct radeon_bo *rbo;
1468 struct drm_framebuffer *target_fb;
1469 uint64_t fb_location;
1470 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1471 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1472 u32 viewport_w, viewport_h;
1473 int r;
1474 bool bypass_lut = false;
1475 struct drm_format_name_buf format_name;
1476
1477
1478 if (!atomic && !crtc->primary->fb) {
1479 DRM_DEBUG_KMS("No FB bound\n");
1480 return 0;
1481 }
1482
1483 if (atomic) {
1484 radeon_fb = to_radeon_framebuffer(fb);
1485 target_fb = fb;
1486 }
1487 else {
1488 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1489 target_fb = crtc->primary->fb;
1490 }
1491
1492 obj = radeon_fb->obj;
1493 rbo = gem_to_radeon_bo(obj);
1494 r = radeon_bo_reserve(rbo, false);
1495 if (unlikely(r != 0))
1496 return r;
1497
1498
1499
1500
1501 if (atomic)
1502 fb_location = radeon_bo_gpu_offset(rbo);
1503 else {
1504 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1505 if (unlikely(r != 0)) {
1506 radeon_bo_unreserve(rbo);
1507 return -EINVAL;
1508 }
1509 }
1510 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1511 radeon_bo_unreserve(rbo);
1512
1513 switch (target_fb->pixel_format) {
1514 case DRM_FORMAT_C8:
1515 fb_format =
1516 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1517 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1518 break;
1519 case DRM_FORMAT_XRGB4444:
1520 case DRM_FORMAT_ARGB4444:
1521 fb_format =
1522 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1523 AVIVO_D1GRPH_CONTROL_16BPP_ARGB4444;
1524#ifdef __BIG_ENDIAN
1525 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1526#endif
1527 break;
1528 case DRM_FORMAT_XRGB1555:
1529 fb_format =
1530 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1531 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1532#ifdef __BIG_ENDIAN
1533 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1534#endif
1535 break;
1536 case DRM_FORMAT_RGB565:
1537 fb_format =
1538 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1539 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1540#ifdef __BIG_ENDIAN
1541 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1542#endif
1543 break;
1544 case DRM_FORMAT_XRGB8888:
1545 case DRM_FORMAT_ARGB8888:
1546 fb_format =
1547 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1548 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1549#ifdef __BIG_ENDIAN
1550 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1551#endif
1552 break;
1553 case DRM_FORMAT_XRGB2101010:
1554 case DRM_FORMAT_ARGB2101010:
1555 fb_format =
1556 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1557 AVIVO_D1GRPH_CONTROL_32BPP_ARGB2101010;
1558#ifdef __BIG_ENDIAN
1559 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1560#endif
1561
1562 bypass_lut = true;
1563 break;
1564 default:
1565 DRM_ERROR("Unsupported screen format %s\n",
1566 drm_get_format_name(target_fb->pixel_format, &format_name));
1567 return -EINVAL;
1568 }
1569
1570 if (rdev->family >= CHIP_R600) {
1571 if (tiling_flags & RADEON_TILING_MACRO)
1572 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1573 else if (tiling_flags & RADEON_TILING_MICRO)
1574 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1575 } else {
1576 if (tiling_flags & RADEON_TILING_MACRO)
1577 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1578
1579 if (tiling_flags & RADEON_TILING_MICRO)
1580 fb_format |= AVIVO_D1GRPH_TILED;
1581 }
1582
1583 if (radeon_crtc->crtc_id == 0)
1584 WREG32(AVIVO_D1VGA_CONTROL, 0);
1585 else
1586 WREG32(AVIVO_D2VGA_CONTROL, 0);
1587
1588
1589
1590
1591 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1592
1593 if (rdev->family >= CHIP_RV770) {
1594 if (radeon_crtc->crtc_id) {
1595 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1596 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1597 } else {
1598 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1599 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1600 }
1601 }
1602 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1603 (u32) fb_location);
1604 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1605 radeon_crtc->crtc_offset, (u32) fb_location);
1606 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1607 if (rdev->family >= CHIP_R600)
1608 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1609
1610
1611 WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset,
1612 (bypass_lut ? AVIVO_LUT_10BIT_BYPASS_EN : 0), ~AVIVO_LUT_10BIT_BYPASS_EN);
1613
1614 if (bypass_lut)
1615 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1616
1617 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1618 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1619 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1620 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1621 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1622 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1623
1624 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1625 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1626 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1627
1628 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1629 target_fb->height);
1630 x &= ~3;
1631 y &= ~1;
1632 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1633 (x << 16) | y);
1634 viewport_w = crtc->mode.hdisplay;
1635 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1636 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1637 (viewport_w << 16) | viewport_h);
1638
1639
1640 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 3);
1641
1642 if (!atomic && fb && fb != crtc->primary->fb) {
1643 radeon_fb = to_radeon_framebuffer(fb);
1644 rbo = gem_to_radeon_bo(radeon_fb->obj);
1645 r = radeon_bo_reserve(rbo, false);
1646 if (unlikely(r != 0))
1647 return r;
1648 radeon_bo_unpin(rbo);
1649 radeon_bo_unreserve(rbo);
1650 }
1651
1652
1653 radeon_bandwidth_update(rdev);
1654
1655 return 0;
1656}
1657
1658int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1659 struct drm_framebuffer *old_fb)
1660{
1661 struct drm_device *dev = crtc->dev;
1662 struct radeon_device *rdev = dev->dev_private;
1663
1664 if (ASIC_IS_DCE4(rdev))
1665 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1666 else if (ASIC_IS_AVIVO(rdev))
1667 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1668 else
1669 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1670}
1671
1672int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1673 struct drm_framebuffer *fb,
1674 int x, int y, enum mode_set_atomic state)
1675{
1676 struct drm_device *dev = crtc->dev;
1677 struct radeon_device *rdev = dev->dev_private;
1678
1679 if (ASIC_IS_DCE4(rdev))
1680 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1681 else if (ASIC_IS_AVIVO(rdev))
1682 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1683 else
1684 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1685}
1686
1687
1688static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1689{
1690 struct drm_device *dev = crtc->dev;
1691 struct radeon_device *rdev = dev->dev_private;
1692 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1693 u32 disp_merge_cntl;
1694
1695 switch (radeon_crtc->crtc_id) {
1696 case 0:
1697 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1698 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1699 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1700 break;
1701 case 1:
1702 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1703 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1704 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1705 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1706 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1707 break;
1708 }
1709}
1710
1711
1712
1713
1714
1715
1716
1717
1718static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1719{
1720 struct drm_device *dev = crtc->dev;
1721 struct drm_crtc *test_crtc;
1722 struct radeon_crtc *test_radeon_crtc;
1723 u32 pll_in_use = 0;
1724
1725 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1726 if (crtc == test_crtc)
1727 continue;
1728
1729 test_radeon_crtc = to_radeon_crtc(test_crtc);
1730 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1731 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1732 }
1733 return pll_in_use;
1734}
1735
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1746{
1747 struct drm_device *dev = crtc->dev;
1748 struct radeon_device *rdev = dev->dev_private;
1749 struct drm_crtc *test_crtc;
1750 struct radeon_crtc *test_radeon_crtc;
1751
1752 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1753 if (crtc == test_crtc)
1754 continue;
1755 test_radeon_crtc = to_radeon_crtc(test_crtc);
1756 if (test_radeon_crtc->encoder &&
1757 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1758
1759 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1760 test_radeon_crtc->pll_id == ATOM_PPLL2)
1761 continue;
1762
1763 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1764 return test_radeon_crtc->pll_id;
1765 }
1766 }
1767 return ATOM_PPLL_INVALID;
1768}
1769
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
1780{
1781 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1782 struct drm_device *dev = crtc->dev;
1783 struct radeon_device *rdev = dev->dev_private;
1784 struct drm_crtc *test_crtc;
1785 struct radeon_crtc *test_radeon_crtc;
1786 u32 adjusted_clock, test_adjusted_clock;
1787
1788 adjusted_clock = radeon_crtc->adjusted_clock;
1789
1790 if (adjusted_clock == 0)
1791 return ATOM_PPLL_INVALID;
1792
1793 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1794 if (crtc == test_crtc)
1795 continue;
1796 test_radeon_crtc = to_radeon_crtc(test_crtc);
1797 if (test_radeon_crtc->encoder &&
1798 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1799
1800 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1801 test_radeon_crtc->pll_id == ATOM_PPLL2)
1802 continue;
1803
			if (test_radeon_crtc->connector == radeon_crtc->connector) {
				if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
					return test_radeon_crtc->pll_id;
			}
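			/* otherwise share with a crtc that runs the exact same clock
			 * with the same spread spectrum state
			 */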
			test_adjusted_clock = test_radeon_crtc->adjusted_clock;
			if ((crtc->mode.clock == test_crtc->mode.clock) &&
			    (adjusted_clock == test_adjusted_clock) &&
			    (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
			    (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
				return test_radeon_crtc->pll_id;
		}
	}
	return ATOM_PPLL_INVALID;
}
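/*
 * radeon_atom_pick_pll - pick a PPLL for the crtc
 *
 * Picks a pixel PLL for the crtc depending on the asic family:
 * DP outputs can run from an external reference clock (no PPLL at all),
 * from a PLL shared with the other DP outputs, or from a dedicated
 * PPLL0/DCPLL on newer parts; everything else tries to share a PPLL
 * with a compatible crtc first and only then claims a free one from
 * the mask returned by radeon_get_pll_use_mask().
 */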
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	u32 pll_in_use;
	int pll;

	if (ASIC_IS_DCE8(rdev)) {
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
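				/* the DP reference clock comes from an external source,
				 * no PPLL needs to be programmed for this crtc
				 */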
				return ATOM_PPLL_INVALID;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

		if ((rdev->family == CHIP_KABINI) ||
		    (rdev->family == CHIP_MULLINS)) {
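			/* Kabini/Mullins: only PPLL1 and PPLL2 are tried here */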
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		} else {
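			/* the remaining DCE8 parts can use PPLL0, PPLL1 or PPLL2 */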
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			if (!(pll_in_use & (1 << ATOM_PPLL0)))
				return ATOM_PPLL0;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		}
	} else if (ASIC_IS_DCE61(rdev)) {
		struct radeon_encoder_atom_dig *dig =
			radeon_encoder->enc_priv;

		if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
		    (dig->linkb == false))
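			/* on DCE6.1, UNIPHY link A always gets PPLL2 */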
			return ATOM_PPLL2;
		else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL0)))
			return ATOM_PPLL0;
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE41(rdev)) {
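		/* DCE4.1: PLLs are not shared here, just grab a free one */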
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
		}
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE4(rdev)) {
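		/* DCE4/5/6: DP can run from the external clock, from PPLL0
		 * (DCE6) or from the DCPLL (DCE5); otherwise try to share a
		 * PPLL before falling back to a free PPLL1/PPLL2
		 */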
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
			else if (ASIC_IS_DCE6(rdev))
				return ATOM_PPLL0;
			else if (ASIC_IS_DCE5(rdev))
				return ATOM_DCPLL;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else {
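		/* pre-DCE4: use the pll that matches the crtc id */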
		return radeon_crtc->crtc_id;
	}
}

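/* program the display engine clock at init; on DCE4/5 also handle DCPLL spread spectrum */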
void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
{
	if (ASIC_IS_DCE6(rdev))
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
	else if (ASIC_IS_DCE4(rdev)) {
		struct radeon_atom_ss ss;
		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
								   ASIC_INTERNAL_SS_ON_DCPLL,
								   rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
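		/* reprogram DISPCLK with spread spectrum temporarily disabled */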
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
	}
}

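/* full mode set: program the pll, crtc timing, scanout base, overscan and scaler */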
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	bool is_tvcv = false;

	if (radeon_encoder->active_device &
	    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
		is_tvcv = true;

	if (!radeon_crtc->adjusted_clock)
		return -EINVAL;

	atombios_crtc_set_pll(crtc, adjusted_mode);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	radeon_cursor_reset(crtc);

	radeon_crtc->hw_mode = *adjusted_mode;

	return 0;
}

static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     const struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

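	/* bind the encoder and connector to the crtc to avoid repeated lookups later */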
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_crtc->encoder = encoder;
			radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
			break;
		}
	}
	if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
		radeon_crtc->encoder = NULL;
		radeon_crtc->connector = NULL;
		return false;
	}
	if (radeon_crtc->encoder) {
		struct radeon_encoder *radeon_encoder =
			to_radeon_encoder(radeon_crtc->encoder);

		radeon_crtc->output_csc = radeon_encoder->output_csc;
	}
	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
		return false;

	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
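	/* a DP output can still run from the external reference clock, but
	 * every other output really needs a PPLL, so fail without one
	 */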
	if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
	    !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
		return false;

	return true;
}

static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

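	/* make sure the crtc is not power gated before it is programmed */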
	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_DISABLE);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_atom_ss ss;
	int i;

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct radeon_framebuffer *radeon_fb;
		struct radeon_bo *rbo;

		radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
		rbo = gem_to_radeon_bo(radeon_fb->obj);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve rbo before unpin\n");
		else {
			radeon_bo_unpin(rbo);
			radeon_bo_unreserve(rbo);
		}
	}

	if (ASIC_IS_DCE4(rdev))
		WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
	else if (ASIC_IS_AVIVO(rdev))
		WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0);

	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_ENABLE);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i] &&
		    rdev->mode_info.crtcs[i]->enabled &&
		    i != radeon_crtc->crtc_id &&
		    radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
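			/* another enabled crtc is still using this pll, leave it running */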
			goto done;
		}
	}

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	case ATOM_PPLL0:
		if ((rdev->family == CHIP_ARUBA) ||
		    (rdev->family == CHIP_KAVERI) ||
		    (rdev->family == CHIP_BONAIRE) ||
		    (rdev->family == CHIP_HAWAII))
			atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
						  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	default:
		break;
	}
done:
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.mode_set_base_atomic = atombios_crtc_set_base_atomic,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
	.disable = atombios_crtc_disable,
};

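/* set up the per-crtc register offset and attach the atombios crtc helper callbacks */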
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}