/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/radeon_drm.h>
#include <drm/drm_fixed.h>
#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"
34
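/* Program overscan borders via the SetCRTC_OverScan command table.
 * RMX_CENTER splits the unused display area evenly on all sides,
 * RMX_ASPECT pads only the axis needed to preserve the aspect ratio,
 * and RMX_FULL (the default) uses the configured border sizes.
 */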
35static void atombios_overscan_setup(struct drm_crtc *crtc,
36 struct drm_display_mode *mode,
37 struct drm_display_mode *adjusted_mode)
38{
39 struct drm_device *dev = crtc->dev;
40 struct radeon_device *rdev = dev->dev_private;
41 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
42 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
43 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
44 int a1, a2;
45
46 memset(&args, 0, sizeof(args));
47
48 args.ucCRTC = radeon_crtc->crtc_id;
49
50 switch (radeon_crtc->rmx_type) {
51 case RMX_CENTER:
52 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
54 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
56 break;
57 case RMX_ASPECT:
58 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
59 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
60
61 if (a1 > a2) {
62 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
64 } else if (a2 > a1) {
65 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
67 }
68 break;
69 case RMX_FULL:
70 default:
71 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
73 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
74 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
75 break;
76 }
77 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
78}
79
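/* Configure the CRTC scaler via the EnableScaler command table.  TV
 * outputs select a TV standard and multi-tap filtering; other outputs
 * map the RMX type to an ATOM scaler mode.  Pre-AVIVO parts only have
 * a scaler on CRTC 0, and RV515-R580 additionally force the TV scaler.
 */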
80static void atombios_scaler_setup(struct drm_crtc *crtc)
81{
82 struct drm_device *dev = crtc->dev;
83 struct radeon_device *rdev = dev->dev_private;
84 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
85 ENABLE_SCALER_PS_ALLOCATION args;
86 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
87 struct radeon_encoder *radeon_encoder =
88 to_radeon_encoder(radeon_crtc->encoder);
89
90 enum radeon_tv_std tv_std = TV_STD_NTSC;
91 bool is_tv = false, is_cv = false;
92
93 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
94 return;
95
96 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
97 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
98 tv_std = tv_dac->tv_std;
99 is_tv = true;
100 }
101
102 memset(&args, 0, sizeof(args));
103
104 args.ucScaler = radeon_crtc->crtc_id;
105
106 if (is_tv) {
107 switch (tv_std) {
108 case TV_STD_NTSC:
109 default:
110 args.ucTVStandard = ATOM_TV_NTSC;
111 break;
112 case TV_STD_PAL:
113 args.ucTVStandard = ATOM_TV_PAL;
114 break;
115 case TV_STD_PAL_M:
116 args.ucTVStandard = ATOM_TV_PALM;
117 break;
118 case TV_STD_PAL_60:
119 args.ucTVStandard = ATOM_TV_PAL60;
120 break;
121 case TV_STD_NTSC_J:
122 args.ucTVStandard = ATOM_TV_NTSCJ;
123 break;
124 case TV_STD_SCART_PAL:
125 args.ucTVStandard = ATOM_TV_PAL;
126 break;
127 case TV_STD_SECAM:
128 args.ucTVStandard = ATOM_TV_SECAM;
129 break;
130 case TV_STD_PAL_CN:
131 args.ucTVStandard = ATOM_TV_PALCN;
132 break;
133 }
134 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
135 } else if (is_cv) {
136 args.ucTVStandard = ATOM_TV_CV;
137 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
138 } else {
139 switch (radeon_crtc->rmx_type) {
140 case RMX_FULL:
141 args.ucEnable = ATOM_SCALER_EXPANSION;
142 break;
143 case RMX_CENTER:
144 args.ucEnable = ATOM_SCALER_CENTER;
145 break;
146 case RMX_ASPECT:
147 args.ucEnable = ATOM_SCALER_EXPANSION;
148 break;
149 default:
150 if (ASIC_IS_AVIVO(rdev))
151 args.ucEnable = ATOM_SCALER_DISABLE;
152 else
153 args.ucEnable = ATOM_SCALER_CENTER;
154 break;
155 }
156 }
157 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
158 if ((is_tv || is_cv)
159 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
160 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
161 }
162}
163
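/* Lock or unlock the CRTC's double buffered registers
 * (UpdateCRTC_DoubleBufferRegisters) so that a set of register
 * updates can be applied atomically.
 */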
164static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
165{
166 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
167 struct drm_device *dev = crtc->dev;
168 struct radeon_device *rdev = dev->dev_private;
169 int index =
170 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
171 ENABLE_CRTC_PS_ALLOCATION args;
172
173 memset(&args, 0, sizeof(args));
174
175 args.ucCRTC = radeon_crtc->crtc_id;
176 args.ucEnable = lock;
177
178 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
179}
180
181static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
182{
183 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
184 struct drm_device *dev = crtc->dev;
185 struct radeon_device *rdev = dev->dev_private;
186 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
187 ENABLE_CRTC_PS_ALLOCATION args;
188
189 memset(&args, 0, sizeof(args));
190
191 args.ucCRTC = radeon_crtc->crtc_id;
192 args.ucEnable = state;
193
194 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
195}
196
197static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
198{
199 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
200 struct drm_device *dev = crtc->dev;
201 struct radeon_device *rdev = dev->dev_private;
202 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
203 ENABLE_CRTC_PS_ALLOCATION args;
204
205 memset(&args, 0, sizeof(args));
206
207 args.ucCRTC = radeon_crtc->crtc_id;
208 args.ucEnable = state;
209
210 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
211}
212
213static const u32 vga_control_regs[6] =
214{
215 AVIVO_D1VGA_CONTROL,
216 AVIVO_D2VGA_CONTROL,
217 EVERGREEN_D3VGA_CONTROL,
218 EVERGREEN_D4VGA_CONTROL,
219 EVERGREEN_D5VGA_CONTROL,
220 EVERGREEN_D6VGA_CONTROL,
221};
222
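/* Blank or unblank the CRTC via the BlankCRTC command table.  On DCE8
 * the per-CRTC VGA control register is temporarily enabled around the
 * call and then restored.
 */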
223static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
224{
225 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
226 struct drm_device *dev = crtc->dev;
227 struct radeon_device *rdev = dev->dev_private;
228 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
229 BLANK_CRTC_PS_ALLOCATION args;
230 u32 vga_control = 0;
231
232 memset(&args, 0, sizeof(args));
233
234 if (ASIC_IS_DCE8(rdev)) {
235 vga_control = RREG32(vga_control_regs[radeon_crtc->crtc_id]);
236 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control | 1);
237 }
238
239 args.ucCRTC = radeon_crtc->crtc_id;
240 args.ucBlanking = state;
241
242 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
243
244 if (ASIC_IS_DCE8(rdev)) {
245 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control);
246 }
247}
248
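/* Enable or disable power gating for the display pipe backing this CRTC. */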
249static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
250{
251 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
252 struct drm_device *dev = crtc->dev;
253 struct radeon_device *rdev = dev->dev_private;
254 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
255 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
256
257 memset(&args, 0, sizeof(args));
258
259 args.ucDispPipeId = radeon_crtc->crtc_id;
260 args.ucEnable = state;
261
262 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
263}
264
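/* DPMS handling: on power-on, enable the CRTC (and its memory requests
 * on DCE3 through DCE5 parts), unblank, enable vblank handling and
 * reload the LUT; on power-off, do the reverse.  Power management
 * clocks are recomputed either way.
 */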
265void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
266{
267 struct drm_device *dev = crtc->dev;
268 struct radeon_device *rdev = dev->dev_private;
269 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
270
271 switch (mode) {
272 case DRM_MODE_DPMS_ON:
273 radeon_crtc->enabled = true;
274 atombios_enable_crtc(crtc, ATOM_ENABLE);
275 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
276 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
277 atombios_blank_crtc(crtc, ATOM_DISABLE);
278 if (dev->num_crtcs > radeon_crtc->crtc_id)
279 drm_crtc_vblank_on(crtc);
280 radeon_crtc_load_lut(crtc);
281 break;
282 case DRM_MODE_DPMS_STANDBY:
283 case DRM_MODE_DPMS_SUSPEND:
284 case DRM_MODE_DPMS_OFF:
285 if (dev->num_crtcs > radeon_crtc->crtc_id)
286 drm_crtc_vblank_off(crtc);
287 if (radeon_crtc->enabled)
288 atombios_blank_crtc(crtc, ATOM_ENABLE);
289 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
290 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
291 atombios_enable_crtc(crtc, ATOM_DISABLE);
292 radeon_crtc->enabled = false;
293 break;
294 }
295
296 radeon_pm_compute_clocks(rdev);
297}
298
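/* Program the CRTC timing from a detailed (DTD) mode via
 * SetCRTC_UsingDTDTiming, translating the DRM sync polarity, interlace
 * and doublescan flags into ATOM misc bits and accounting for borders.
 */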
299static void
300atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
301 struct drm_display_mode *mode)
302{
303 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
304 struct drm_device *dev = crtc->dev;
305 struct radeon_device *rdev = dev->dev_private;
306 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
307 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
308 u16 misc = 0;
309
310 memset(&args, 0, sizeof(args));
311 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
312 args.usH_Blanking_Time =
313 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
314 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
315 args.usV_Blanking_Time =
316 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
317 args.usH_SyncOffset =
318 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
319 args.usH_SyncWidth =
320 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
321 args.usV_SyncOffset =
322 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
323 args.usV_SyncWidth =
324 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
325 args.ucH_Border = radeon_crtc->h_border;
326 args.ucV_Border = radeon_crtc->v_border;
327
328 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
329 misc |= ATOM_VSYNC_POLARITY;
330 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
331 misc |= ATOM_HSYNC_POLARITY;
332 if (mode->flags & DRM_MODE_FLAG_CSYNC)
333 misc |= ATOM_COMPOSITESYNC;
334 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
335 misc |= ATOM_INTERLACE;
336 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
337 misc |= ATOM_DOUBLE_CLOCK_MODE;
338 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
339 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
340
341 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
342 args.ucCRTC = radeon_crtc->crtc_id;
343
344 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
345}
346
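/* Program the full CRTC timing via SetCRTC_Timing, including overscan
 * borders and the ATOM misc flags derived from the DRM mode flags.
 */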
347static void atombios_crtc_set_timing(struct drm_crtc *crtc,
348 struct drm_display_mode *mode)
349{
350 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
351 struct drm_device *dev = crtc->dev;
352 struct radeon_device *rdev = dev->dev_private;
353 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
354 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
355 u16 misc = 0;
356
357 memset(&args, 0, sizeof(args));
358 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
359 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
360 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
361 args.usH_SyncWidth =
362 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
363 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
364 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
365 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
366 args.usV_SyncWidth =
367 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
368
369 args.ucOverscanRight = radeon_crtc->h_border;
370 args.ucOverscanLeft = radeon_crtc->h_border;
371 args.ucOverscanBottom = radeon_crtc->v_border;
372 args.ucOverscanTop = radeon_crtc->v_border;
373
374 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
375 misc |= ATOM_VSYNC_POLARITY;
376 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
377 misc |= ATOM_HSYNC_POLARITY;
378 if (mode->flags & DRM_MODE_FLAG_CSYNC)
379 misc |= ATOM_COMPOSITESYNC;
380 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
381 misc |= ATOM_INTERLACE;
382 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
383 misc |= ATOM_DOUBLE_CLOCK_MODE;
384 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
385 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
386
387 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
388 args.ucCRTC = radeon_crtc->crtc_id;
389
390 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
391}
392
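/* Turn off spread spectrum by clearing the SS enable bit directly in
 * the per-PLL SS control register (the register layout differs between
 * DCE4 and older AVIVO parts).
 */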
393static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
394{
395 u32 ss_cntl;
396
397 if (ASIC_IS_DCE4(rdev)) {
398 switch (pll_id) {
399 case ATOM_PPLL1:
400 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
401 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
402 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
403 break;
404 case ATOM_PPLL2:
405 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
406 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
407 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
408 break;
409 case ATOM_DCPLL:
410 case ATOM_PPLL_INVALID:
411 return;
412 }
413 } else if (ASIC_IS_AVIVO(rdev)) {
414 switch (pll_id) {
415 case ATOM_PPLL1:
416 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
417 ss_cntl &= ~1;
418 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
419 break;
420 case ATOM_PPLL2:
421 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
422 ss_cntl &= ~1;
423 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
424 break;
425 case ATOM_DCPLL:
426 case ATOM_PPLL_INVALID:
427 return;
428 }
429 }
430}
431
432
433union atom_enable_ss {
434 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
435 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
436 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
437 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
438 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
439};
440
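/* Enable or disable spread spectrum on a PLL via
 * EnableSpreadSpectrumOnPPLL, using the parameter layout that matches
 * the ASIC generation.  Disabling is skipped when another active CRTC
 * still shares the same PLL.
 */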
441static void atombios_crtc_program_ss(struct radeon_device *rdev,
442 int enable,
443 int pll_id,
444 int crtc_id,
445 struct radeon_atom_ss *ss)
446{
447 unsigned i;
448 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
449 union atom_enable_ss args;
450
451 if (enable) {
		/* Don't mess with SS if percentage is 0 or external ss.
		 * SS is already disabled previously, and disabling it
		 * again can cause display problems if the pll is
		 * already programmed.
		 */
457 if (ss->percentage == 0)
458 return;
459 if (ss->type & ATOM_EXTERNAL_SS_MASK)
460 return;
461 } else {
462 for (i = 0; i < rdev->num_crtc; i++) {
463 if (rdev->mode_info.crtcs[i] &&
464 rdev->mode_info.crtcs[i]->enabled &&
465 i != crtc_id &&
466 pll_id == rdev->mode_info.crtcs[i]->pll_id) {
				/* one other crtc is using this pll; don't turn
				 * off spread spectrum as it might turn off
				 * the display on the active crtc
				 */
471 return;
472 }
473 }
474 }
475
476 memset(&args, 0, sizeof(args));
477
478 if (ASIC_IS_DCE5(rdev)) {
479 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
480 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
481 switch (pll_id) {
482 case ATOM_PPLL1:
483 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
484 break;
485 case ATOM_PPLL2:
486 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
487 break;
488 case ATOM_DCPLL:
489 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
490 break;
491 case ATOM_PPLL_INVALID:
492 return;
493 }
494 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
495 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
496 args.v3.ucEnable = enable;
497 } else if (ASIC_IS_DCE4(rdev)) {
498 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
499 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
500 switch (pll_id) {
501 case ATOM_PPLL1:
502 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
503 break;
504 case ATOM_PPLL2:
505 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
506 break;
507 case ATOM_DCPLL:
508 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
509 break;
510 case ATOM_PPLL_INVALID:
511 return;
512 }
513 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
514 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
515 args.v2.ucEnable = enable;
516 } else if (ASIC_IS_DCE3(rdev)) {
517 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
518 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
519 args.v1.ucSpreadSpectrumStep = ss->step;
520 args.v1.ucSpreadSpectrumDelay = ss->delay;
521 args.v1.ucSpreadSpectrumRange = ss->range;
522 args.v1.ucPpll = pll_id;
523 args.v1.ucEnable = enable;
524 } else if (ASIC_IS_AVIVO(rdev)) {
525 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
526 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
527 atombios_disable_ss(rdev, pll_id);
528 return;
529 }
530 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
531 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
532 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
533 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
534 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
535 args.lvds_ss_2.ucEnable = enable;
536 } else {
537 if (enable == ATOM_DISABLE) {
538 atombios_disable_ss(rdev, pll_id);
539 return;
540 }
541 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
542 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
543 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
544 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
545 args.lvds_ss.ucEnable = enable;
546 }
547 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
548}
549
550union adjust_pixel_clock {
551 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
552 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
553};
554
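/* Compute the pixel clock to feed to the PLL calculation: set up pll
 * flags based on the ASIC/encoder, scale the clock for HDMI deep color,
 * and on DCE3+ let the AdjustDisplayPll command table apply any
 * encoder/transmitter specific adjustments.
 */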
555static u32 atombios_adjust_pll(struct drm_crtc *crtc,
556 struct drm_display_mode *mode)
557{
558 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
559 struct drm_device *dev = crtc->dev;
560 struct radeon_device *rdev = dev->dev_private;
561 struct drm_encoder *encoder = radeon_crtc->encoder;
562 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
563 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
564 u32 adjusted_clock = mode->clock;
565 int encoder_mode = atombios_get_encoder_mode(encoder);
566 u32 dp_clock = mode->clock;
567 u32 clock = mode->clock;
568 int bpc = radeon_crtc->bpc;
569 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
570
	/* reset the pll flags */
572 radeon_crtc->pll_flags = 0;
573
574 if (ASIC_IS_AVIVO(rdev)) {
575 if ((rdev->family == CHIP_RS600) ||
576 (rdev->family == CHIP_RS690) ||
577 (rdev->family == CHIP_RS740))
			radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
580
581 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)
582 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
583 else
584 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
585
586 if (rdev->family < CHIP_RV770)
587 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
588
589 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
590 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
591
592 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
593 && !radeon_crtc->ss_enabled)
594 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
595 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000)
596 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
597 } else {
598 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
599
600 if (mode->clock > 200000)
601 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
602 else
603 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
604 }
605
606 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
607 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
608 if (connector) {
609 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
610 struct radeon_connector_atom_dig *dig_connector =
611 radeon_connector->con_priv;
612
613 dp_clock = dig_connector->dp_clock;
614 }
615 }
616
617 if (radeon_encoder->is_mst_encoder) {
618 struct radeon_encoder_mst *mst_enc = radeon_encoder->enc_priv;
619 struct radeon_connector_atom_dig *dig_connector = mst_enc->connector->con_priv;
620
621 dp_clock = dig_connector->dp_clock;
622 }
623
	/* use recommended ref_div for ss */
625 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
626 if (radeon_crtc->ss_enabled) {
627 if (radeon_crtc->ss.refdiv) {
628 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
629 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
630 if (ASIC_IS_AVIVO(rdev) &&
631 rdev->family != CHIP_RS780 &&
632 rdev->family != CHIP_RS880)
633 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
634 }
635 }
636 }
637
638 if (ASIC_IS_AVIVO(rdev)) {
		/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
640 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
641 adjusted_clock = mode->clock * 2;
642 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
643 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
644 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
645 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
646 } else {
647 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
648 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
649 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
650 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
651 }
652
	/* adjust the pixel clock for HDMI deep color modes */
654 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
655 switch (bpc) {
656 case 8:
657 default:
658 break;
659 case 10:
660 clock = (clock * 5) / 4;
661 break;
662 case 12:
663 clock = (clock * 3) / 2;
664 break;
665 case 16:
666 clock = clock * 2;
667 break;
668 }
669 }
670
	/* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
	 * accordingly based on the encoder/transmitter to work around
	 * special hw requirements.
	 */
675 if (ASIC_IS_DCE3(rdev)) {
676 union adjust_pixel_clock args;
677 u8 frev, crev;
678 int index;
679
680 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
681 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
682 &crev))
683 return adjusted_clock;
684
685 memset(&args, 0, sizeof(args));
686
687 switch (frev) {
688 case 1:
689 switch (crev) {
690 case 1:
691 case 2:
692 args.v1.usPixelClock = cpu_to_le16(clock / 10);
693 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
694 args.v1.ucEncodeMode = encoder_mode;
695 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
696 args.v1.ucConfig |=
697 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
698
699 atom_execute_table(rdev->mode_info.atom_context,
700 index, (uint32_t *)&args);
701 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
702 break;
703 case 3:
704 args.v3.sInput.usPixelClock = cpu_to_le16(clock / 10);
705 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
706 args.v3.sInput.ucEncodeMode = encoder_mode;
707 args.v3.sInput.ucDispPllConfig = 0;
708 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
709 args.v3.sInput.ucDispPllConfig |=
710 DISPPLL_CONFIG_SS_ENABLE;
711 if (ENCODER_MODE_IS_DP(encoder_mode)) {
712 args.v3.sInput.ucDispPllConfig |=
713 DISPPLL_CONFIG_COHERENT_MODE;
					/* 16200 or 27000 */
715 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
716 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
717 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
718 if (dig->coherent_mode)
719 args.v3.sInput.ucDispPllConfig |=
720 DISPPLL_CONFIG_COHERENT_MODE;
721 if (is_duallink)
722 args.v3.sInput.ucDispPllConfig |=
723 DISPPLL_CONFIG_DUAL_LINK;
724 }
725 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
726 ENCODER_OBJECT_ID_NONE)
727 args.v3.sInput.ucExtTransmitterID =
728 radeon_encoder_get_dp_bridge_encoder_id(encoder);
729 else
730 args.v3.sInput.ucExtTransmitterID = 0;
731
732 atom_execute_table(rdev->mode_info.atom_context,
733 index, (uint32_t *)&args);
734 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
735 if (args.v3.sOutput.ucRefDiv) {
736 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
737 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
738 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
739 }
740 if (args.v3.sOutput.ucPostDiv) {
741 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
742 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
743 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
744 }
745 break;
746 default:
747 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
748 return adjusted_clock;
749 }
750 break;
751 default:
752 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
753 return adjusted_clock;
754 }
755 }
756 return adjusted_clock;
757}
758
759union set_pixel_clock {
760 SET_PIXEL_CLOCK_PS_ALLOCATION base;
761 PIXEL_CLOCK_PARAMETERS v1;
762 PIXEL_CLOCK_PARAMETERS_V2 v2;
763 PIXEL_CLOCK_PARAMETERS_V3 v3;
764 PIXEL_CLOCK_PARAMETERS_V5 v5;
765 PIXEL_CLOCK_PARAMETERS_V6 v6;
766};
767
/* on DCE5, make sure the voltage is high enough to support the
 * required disp clk.
 */
771static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
772 u32 dispclk)
773{
774 u8 frev, crev;
775 int index;
776 union set_pixel_clock args;
777
778 memset(&args, 0, sizeof(args));
779
780 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
781 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
782 &crev))
783 return;
784
785 switch (frev) {
786 case 1:
787 switch (crev) {
788 case 5:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
792 args.v5.ucCRTC = ATOM_CRTC_INVALID;
793 args.v5.usPixelClock = cpu_to_le16(dispclk);
794 args.v5.ucPpll = ATOM_DCPLL;
795 break;
796 case 6:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
800 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
801 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
802 args.v6.ucPpll = ATOM_EXT_PLL1;
803 else if (ASIC_IS_DCE6(rdev))
804 args.v6.ucPpll = ATOM_PPLL0;
805 else
806 args.v6.ucPpll = ATOM_DCPLL;
807 break;
808 default:
809 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
810 return;
811 }
812 break;
813 default:
814 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
815 return;
816 }
817 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
818}
819
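/* Program the pixel PLL dividers via the SetPixelClock command table,
 * filling in the parameter struct revision reported by the BIOS.
 */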
820static void atombios_crtc_program_pll(struct drm_crtc *crtc,
821 u32 crtc_id,
822 int pll_id,
823 u32 encoder_mode,
824 u32 encoder_id,
825 u32 clock,
826 u32 ref_div,
827 u32 fb_div,
828 u32 frac_fb_div,
829 u32 post_div,
830 int bpc,
831 bool ss_enabled,
832 struct radeon_atom_ss *ss)
833{
834 struct drm_device *dev = crtc->dev;
835 struct radeon_device *rdev = dev->dev_private;
836 u8 frev, crev;
837 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
838 union set_pixel_clock args;
839
840 memset(&args, 0, sizeof(args));
841
842 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
843 &crev))
844 return;
845
846 switch (frev) {
847 case 1:
848 switch (crev) {
849 case 1:
850 if (clock == ATOM_DISABLE)
851 return;
852 args.v1.usPixelClock = cpu_to_le16(clock / 10);
853 args.v1.usRefDiv = cpu_to_le16(ref_div);
854 args.v1.usFbDiv = cpu_to_le16(fb_div);
855 args.v1.ucFracFbDiv = frac_fb_div;
856 args.v1.ucPostDiv = post_div;
857 args.v1.ucPpll = pll_id;
858 args.v1.ucCRTC = crtc_id;
859 args.v1.ucRefDivSrc = 1;
860 break;
861 case 2:
862 args.v2.usPixelClock = cpu_to_le16(clock / 10);
863 args.v2.usRefDiv = cpu_to_le16(ref_div);
864 args.v2.usFbDiv = cpu_to_le16(fb_div);
865 args.v2.ucFracFbDiv = frac_fb_div;
866 args.v2.ucPostDiv = post_div;
867 args.v2.ucPpll = pll_id;
868 args.v2.ucCRTC = crtc_id;
869 args.v2.ucRefDivSrc = 1;
870 break;
871 case 3:
872 args.v3.usPixelClock = cpu_to_le16(clock / 10);
873 args.v3.usRefDiv = cpu_to_le16(ref_div);
874 args.v3.usFbDiv = cpu_to_le16(fb_div);
875 args.v3.ucFracFbDiv = frac_fb_div;
876 args.v3.ucPostDiv = post_div;
877 args.v3.ucPpll = pll_id;
878 if (crtc_id == ATOM_CRTC2)
879 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
880 else
881 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
882 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
883 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
884 args.v3.ucTransmitterId = encoder_id;
885 args.v3.ucEncoderMode = encoder_mode;
886 break;
887 case 5:
888 args.v5.ucCRTC = crtc_id;
889 args.v5.usPixelClock = cpu_to_le16(clock / 10);
890 args.v5.ucRefDiv = ref_div;
891 args.v5.usFbDiv = cpu_to_le16(fb_div);
892 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
893 args.v5.ucPostDiv = post_div;
894 args.v5.ucMiscInfo = 0;
895 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
896 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
897 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
898 switch (bpc) {
899 case 8:
900 default:
901 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
902 break;
903 case 10:
				/* yes this is correct, the atom define is wrong */
905 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_32BPP;
906 break;
907 case 12:
				/* yes this is correct, the atom define is wrong */
909 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
910 break;
911 }
912 }
913 args.v5.ucTransmitterID = encoder_id;
914 args.v5.ucEncoderMode = encoder_mode;
915 args.v5.ucPpll = pll_id;
916 break;
917 case 6:
918 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
919 args.v6.ucRefDiv = ref_div;
920 args.v6.usFbDiv = cpu_to_le16(fb_div);
921 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
922 args.v6.ucPostDiv = post_div;
923 args.v6.ucMiscInfo = 0;
924 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
925 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
926 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
927 switch (bpc) {
928 case 8:
929 default:
930 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
931 break;
932 case 10:
933 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP_V6;
934 break;
935 case 12:
936 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP_V6;
937 break;
938 case 16:
939 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
940 break;
941 }
942 }
943 args.v6.ucTransmitterID = encoder_id;
944 args.v6.ucEncoderMode = encoder_mode;
945 args.v6.ucPpll = pll_id;
946 break;
947 default:
948 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
949 return;
950 }
951 break;
952 default:
953 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
954 return;
955 }
956
957 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
958}
959
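/* Per-modeset PLL preparation: determine the monitor bpc, look up
 * spread spectrum parameters for the encoder mode, and compute the
 * adjusted clock used later by atombios_crtc_set_pll().
 */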
960static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
961{
962 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
963 struct drm_device *dev = crtc->dev;
964 struct radeon_device *rdev = dev->dev_private;
965 struct radeon_encoder *radeon_encoder =
966 to_radeon_encoder(radeon_crtc->encoder);
967 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
968
969 radeon_crtc->bpc = 8;
970 radeon_crtc->ss_enabled = false;
971
972 if (radeon_encoder->is_mst_encoder) {
973 radeon_dp_mst_prepare_pll(crtc, mode);
974 } else if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
975 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
976 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
977 struct drm_connector *connector =
978 radeon_get_connector_for_encoder(radeon_crtc->encoder);
979 struct radeon_connector *radeon_connector =
980 to_radeon_connector(connector);
981 struct radeon_connector_atom_dig *dig_connector =
982 radeon_connector->con_priv;
983 int dp_clock;
984
		/* assign the mode clock for the hdmi deep color max clock limit check */
986 radeon_connector->pixelclock_for_modeset = mode->clock;
987 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
988
989 switch (encoder_mode) {
990 case ATOM_ENCODER_MODE_DP_MST:
991 case ATOM_ENCODER_MODE_DP:
			/* DP/eDP */
993 dp_clock = dig_connector->dp_clock / 10;
994 if (ASIC_IS_DCE4(rdev))
995 radeon_crtc->ss_enabled =
996 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
997 ASIC_INTERNAL_SS_ON_DP,
998 dp_clock);
999 else {
1000 if (dp_clock == 16200) {
1001 radeon_crtc->ss_enabled =
1002 radeon_atombios_get_ppll_ss_info(rdev,
1003 &radeon_crtc->ss,
1004 ATOM_DP_SS_ID2);
1005 if (!radeon_crtc->ss_enabled)
1006 radeon_crtc->ss_enabled =
1007 radeon_atombios_get_ppll_ss_info(rdev,
1008 &radeon_crtc->ss,
1009 ATOM_DP_SS_ID1);
1010 } else {
1011 radeon_crtc->ss_enabled =
1012 radeon_atombios_get_ppll_ss_info(rdev,
1013 &radeon_crtc->ss,
1014 ATOM_DP_SS_ID1);
1015 }
				/* disable spread spectrum on DCE3 DP */
1017 radeon_crtc->ss_enabled = false;
1018 }
1019 break;
1020 case ATOM_ENCODER_MODE_LVDS:
1021 if (ASIC_IS_DCE4(rdev))
1022 radeon_crtc->ss_enabled =
1023 radeon_atombios_get_asic_ss_info(rdev,
1024 &radeon_crtc->ss,
1025 dig->lcd_ss_id,
1026 mode->clock / 10);
1027 else
1028 radeon_crtc->ss_enabled =
1029 radeon_atombios_get_ppll_ss_info(rdev,
1030 &radeon_crtc->ss,
1031 dig->lcd_ss_id);
1032 break;
1033 case ATOM_ENCODER_MODE_DVI:
1034 if (ASIC_IS_DCE4(rdev))
1035 radeon_crtc->ss_enabled =
1036 radeon_atombios_get_asic_ss_info(rdev,
1037 &radeon_crtc->ss,
1038 ASIC_INTERNAL_SS_ON_TMDS,
1039 mode->clock / 10);
1040 break;
1041 case ATOM_ENCODER_MODE_HDMI:
1042 if (ASIC_IS_DCE4(rdev))
1043 radeon_crtc->ss_enabled =
1044 radeon_atombios_get_asic_ss_info(rdev,
1045 &radeon_crtc->ss,
1046 ASIC_INTERNAL_SS_ON_HDMI,
1047 mode->clock / 10);
1048 break;
1049 default:
1050 break;
1051 }
1052 }
1053
	/* adjust pixel clock as needed */
1055 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
1056
1057 return true;
1058}
1059
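/* Compute the PLL dividers (AVIVO or legacy algorithm), program spread
 * spectrum off, program the pixel PLL, then recalculate and re-enable
 * spread spectrum if it was requested.
 */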
1060static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
1061{
1062 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1063 struct drm_device *dev = crtc->dev;
1064 struct radeon_device *rdev = dev->dev_private;
1065 struct radeon_encoder *radeon_encoder =
1066 to_radeon_encoder(radeon_crtc->encoder);
1067 u32 pll_clock = mode->clock;
1068 u32 clock = mode->clock;
1069 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
1070 struct radeon_pll *pll;
1071 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
1072
	/* on DCE5+, program the pll with the adjusted (scaled) clock for HDMI deep color */
1074 if (ASIC_IS_DCE5(rdev) &&
1075 (encoder_mode == ATOM_ENCODER_MODE_HDMI) &&
1076 (radeon_crtc->bpc > 8))
1077 clock = radeon_crtc->adjusted_clock;
1078
1079 switch (radeon_crtc->pll_id) {
1080 case ATOM_PPLL1:
1081 pll = &rdev->clock.p1pll;
1082 break;
1083 case ATOM_PPLL2:
1084 pll = &rdev->clock.p2pll;
1085 break;
1086 case ATOM_DCPLL:
1087 case ATOM_PPLL_INVALID:
1088 default:
1089 pll = &rdev->clock.dcpll;
1090 break;
1091 }
1092
	/* update pll params */
1094 pll->flags = radeon_crtc->pll_flags;
1095 pll->reference_div = radeon_crtc->pll_reference_div;
1096 pll->post_div = radeon_crtc->pll_post_div;
1097
1098 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
		/* TV seems to prefer the legacy algo on some boards */
1100 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1101 &fb_div, &frac_fb_div, &ref_div, &post_div);
1102 else if (ASIC_IS_AVIVO(rdev))
1103 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1104 &fb_div, &frac_fb_div, &ref_div, &post_div);
1105 else
1106 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1107 &fb_div, &frac_fb_div, &ref_div, &post_div);
1108
1109 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1110 radeon_crtc->crtc_id, &radeon_crtc->ss);
1111
1112 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1113 encoder_mode, radeon_encoder->encoder_id, clock,
1114 ref_div, fb_div, frac_fb_div, post_div,
1115 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1116
1117 if (radeon_crtc->ss_enabled) {
		/* calculate ss amount and step size */
1119 if (ASIC_IS_DCE4(rdev)) {
1120 u32 step_size;
1121 u32 amount = (((fb_div * 10) + frac_fb_div) *
1122 (u32)radeon_crtc->ss.percentage) /
1123 (100 * (u32)radeon_crtc->ss.percentage_divider);
1124 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1125 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1126 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1127 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1128 step_size = (4 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1129 (125 * 25 * pll->reference_freq / 100);
1130 else
1131 step_size = (2 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1132 (125 * 25 * pll->reference_freq / 100);
1133 radeon_crtc->ss.step = step_size;
1134 }
1135
1136 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1137 radeon_crtc->crtc_id, &radeon_crtc->ss);
1138 }
1139}
1140
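/* Program the DCE4+ primary surface (scanout) registers: pin the
 * framebuffer BO, translate the pixel format and tiling flags into
 * GRPH_CONTROL bits, set the surface address, pitch and viewport, and
 * unpin the previous framebuffer.
 */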
1141static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1142 struct drm_framebuffer *fb,
1143 int x, int y, int atomic)
1144{
1145 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1146 struct drm_device *dev = crtc->dev;
1147 struct radeon_device *rdev = dev->dev_private;
1148 struct radeon_framebuffer *radeon_fb;
1149 struct drm_framebuffer *target_fb;
1150 struct drm_gem_object *obj;
1151 struct radeon_bo *rbo;
1152 uint64_t fb_location;
1153 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1154 unsigned bankw, bankh, mtaspect, tile_split;
1155 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1156 u32 tmp, viewport_w, viewport_h;
1157 int r;
1158 bool bypass_lut = false;
1159 char *format_name;
1160
	/* no fb bound */
1162 if (!atomic && !crtc->primary->fb) {
1163 DRM_DEBUG_KMS("No FB bound\n");
1164 return 0;
1165 }
1166
1167 if (atomic) {
1168 radeon_fb = to_radeon_framebuffer(fb);
1169 target_fb = fb;
1170 }
1171 else {
1172 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1173 target_fb = crtc->primary->fb;
1174 }
1175
	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
1179 obj = radeon_fb->obj;
1180 rbo = gem_to_radeon_bo(obj);
1181 r = radeon_bo_reserve(rbo, false);
1182 if (unlikely(r != 0))
1183 return r;
1184
1185 if (atomic)
1186 fb_location = radeon_bo_gpu_offset(rbo);
1187 else {
1188 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1189 if (unlikely(r != 0)) {
1190 radeon_bo_unreserve(rbo);
1191 return -EINVAL;
1192 }
1193 }
1194
1195 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1196 radeon_bo_unreserve(rbo);
1197
1198 switch (target_fb->pixel_format) {
1199 case DRM_FORMAT_C8:
1200 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1201 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1202 break;
1203 case DRM_FORMAT_XRGB4444:
1204 case DRM_FORMAT_ARGB4444:
1205 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1206 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
1207#ifdef __BIG_ENDIAN
1208 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1209#endif
1210 break;
1211 case DRM_FORMAT_XRGB1555:
1212 case DRM_FORMAT_ARGB1555:
1213 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1214 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1215#ifdef __BIG_ENDIAN
1216 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1217#endif
1218 break;
1219 case DRM_FORMAT_BGRX5551:
1220 case DRM_FORMAT_BGRA5551:
1221 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1222 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
1223#ifdef __BIG_ENDIAN
1224 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1225#endif
1226 break;
1227 case DRM_FORMAT_RGB565:
1228 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1229 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1230#ifdef __BIG_ENDIAN
1231 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1232#endif
1233 break;
1234 case DRM_FORMAT_XRGB8888:
1235 case DRM_FORMAT_ARGB8888:
1236 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1237 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1238#ifdef __BIG_ENDIAN
1239 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1240#endif
1241 break;
1242 case DRM_FORMAT_XRGB2101010:
1243 case DRM_FORMAT_ARGB2101010:
1244 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1245 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
1246#ifdef __BIG_ENDIAN
1247 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1248#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
1250 bypass_lut = true;
1251 break;
1252 case DRM_FORMAT_BGRX1010102:
1253 case DRM_FORMAT_BGRA1010102:
1254 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1255 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
1256#ifdef __BIG_ENDIAN
1257 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1258#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
1260 bypass_lut = true;
1261 break;
1262 default:
1263 format_name = drm_get_format_name(target_fb->pixel_format);
1264 DRM_ERROR("Unsupported screen format %s\n", format_name);
1265 kfree(format_name);
1266 return -EINVAL;
1267 }
1268
1269 if (tiling_flags & RADEON_TILING_MACRO) {
1270 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1271
		/* Set NUM_BANKS. */
1273 if (rdev->family >= CHIP_TAHITI) {
1274 unsigned index, num_banks;
1275
1276 if (rdev->family >= CHIP_BONAIRE) {
1277 unsigned tileb, tile_split_bytes;
1278
				/* Calculate the macrotile mode index. */
1280 tile_split_bytes = 64 << tile_split;
1281 tileb = 8 * 8 * target_fb->bits_per_pixel / 8;
1282 tileb = min(tile_split_bytes, tileb);
1283
1284 for (index = 0; tileb > 64; index++)
1285 tileb >>= 1;
1286
1287 if (index >= 16) {
1288 DRM_ERROR("Wrong screen bpp (%u) or tile split (%u)\n",
1289 target_fb->bits_per_pixel, tile_split);
1290 return -EINVAL;
1291 }
1292
1293 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3;
1294 } else {
1295 switch (target_fb->bits_per_pixel) {
1296 case 8:
1297 index = 10;
1298 break;
1299 case 16:
1300 index = SI_TILE_MODE_COLOR_2D_SCANOUT_16BPP;
1301 break;
1302 default:
1303 case 32:
1304 index = SI_TILE_MODE_COLOR_2D_SCANOUT_32BPP;
1305 break;
1306 }
1307
1308 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3;
1309 }
1310
1311 fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
1312 } else {
			/* NI and older. */
1314 if (rdev->family >= CHIP_CAYMAN)
1315 tmp = rdev->config.cayman.tile_config;
1316 else
1317 tmp = rdev->config.evergreen.tile_config;
1318
1319 switch ((tmp & 0xf0) >> 4) {
1320 case 0:
1321 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1322 break;
1323 case 1:
1324 default:
1325 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1326 break;
1327 case 2:
1328 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1329 break;
1330 }
1331 }
1332
1333 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1334 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1335 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1336 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1337 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1338 if (rdev->family >= CHIP_BONAIRE) {
		/* select display micro tiling for CIK scanout */
1340 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING);
1341 }
1342 } else if (tiling_flags & RADEON_TILING_MICRO)
1343 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1344
1345 if (rdev->family >= CHIP_BONAIRE) {
		/* Read the pipe config from the 2D TILED SCANOUT mode.
		 * It should be the same for the whole frame.
		 */
1349 u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f;
1350
1351 fb_format |= CIK_GRPH_PIPE_CONFIG(pipe_config);
1352 } else if ((rdev->family == CHIP_TAHITI) ||
1353 (rdev->family == CHIP_PITCAIRN))
1354 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1355 else if ((rdev->family == CHIP_VERDE) ||
1356 (rdev->family == CHIP_OLAND) ||
1357 (rdev->family == CHIP_HAINAN))
1358 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1359
1360 switch (radeon_crtc->crtc_id) {
1361 case 0:
1362 WREG32(AVIVO_D1VGA_CONTROL, 0);
1363 break;
1364 case 1:
1365 WREG32(AVIVO_D2VGA_CONTROL, 0);
1366 break;
1367 case 2:
1368 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1369 break;
1370 case 3:
1371 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1372 break;
1373 case 4:
1374 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1375 break;
1376 case 5:
1377 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1378 break;
1379 default:
1380 break;
1381 }
1382
	/* Make sure surface address is updated at vertical blank rather than
	 * horizontal blank
	 */
1386 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1387
1388 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1389 upper_32_bits(fb_location));
1390 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1391 upper_32_bits(fb_location));
1392 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1393 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1394 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1395 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1396 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1397 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1398
	/*
	 * The LUT only has 256 slots for indexing by a 8 bpc fb. Bypass the LUT
	 * for > 8 bpc scanout to avoid truncation of fb indices to 8 msb's, to
	 * retain the full precision throughout the pipeline.
	 */
1404 WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + radeon_crtc->crtc_offset,
1405 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
1406 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
1407
1408 if (bypass_lut)
1409 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1410
1411 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1412 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1413 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1414 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1415 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1416 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1417
1418 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1419 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1420 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1421
1422 if (rdev->family >= CHIP_BONAIRE)
1423 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1424 target_fb->height);
1425 else
1426 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1427 target_fb->height);
1428 x &= ~3;
1429 y &= ~1;
1430 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1431 (x << 16) | y);
1432 viewport_w = crtc->mode.hdisplay;
1433 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1434 if ((rdev->family >= CHIP_BONAIRE) &&
1435 (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE))
1436 viewport_h *= 2;
1437 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1438 (viewport_w << 16) | viewport_h);
1439
	/* set pageflip to happen anywhere in vblank interval */
1441 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1442
1443 if (!atomic && fb && fb != crtc->primary->fb) {
1444 radeon_fb = to_radeon_framebuffer(fb);
1445 rbo = gem_to_radeon_bo(radeon_fb->obj);
1446 r = radeon_bo_reserve(rbo, false);
1447 if (unlikely(r != 0))
1448 return r;
1449 radeon_bo_unpin(rbo);
1450 radeon_bo_unreserve(rbo);
1451 }
1452
	/* Bytes per pixel may have changed */
1454 radeon_bandwidth_update(rdev);
1455
1456 return 0;
1457}
1458
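/* AVIVO (pre-DCE4) version of the primary surface programming, using
 * the D1GRPH register set.
 */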
1459static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1460 struct drm_framebuffer *fb,
1461 int x, int y, int atomic)
1462{
1463 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1464 struct drm_device *dev = crtc->dev;
1465 struct radeon_device *rdev = dev->dev_private;
1466 struct radeon_framebuffer *radeon_fb;
1467 struct drm_gem_object *obj;
1468 struct radeon_bo *rbo;
1469 struct drm_framebuffer *target_fb;
1470 uint64_t fb_location;
1471 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1472 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1473 u32 viewport_w, viewport_h;
1474 int r;
1475 bool bypass_lut = false;
1476 char *format_name;
1477
	/* no fb bound */
1479 if (!atomic && !crtc->primary->fb) {
1480 DRM_DEBUG_KMS("No FB bound\n");
1481 return 0;
1482 }
1483
1484 if (atomic) {
1485 radeon_fb = to_radeon_framebuffer(fb);
1486 target_fb = fb;
1487 }
1488 else {
1489 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1490 target_fb = crtc->primary->fb;
1491 }
1492
1493 obj = radeon_fb->obj;
1494 rbo = gem_to_radeon_bo(obj);
1495 r = radeon_bo_reserve(rbo, false);
1496 if (unlikely(r != 0))
1497 return r;
1498
	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
1502 if (atomic)
1503 fb_location = radeon_bo_gpu_offset(rbo);
1504 else {
1505 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1506 if (unlikely(r != 0)) {
1507 radeon_bo_unreserve(rbo);
1508 return -EINVAL;
1509 }
1510 }
1511 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1512 radeon_bo_unreserve(rbo);
1513
1514 switch (target_fb->pixel_format) {
1515 case DRM_FORMAT_C8:
1516 fb_format =
1517 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1518 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1519 break;
1520 case DRM_FORMAT_XRGB4444:
1521 case DRM_FORMAT_ARGB4444:
1522 fb_format =
1523 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1524 AVIVO_D1GRPH_CONTROL_16BPP_ARGB4444;
1525#ifdef __BIG_ENDIAN
1526 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1527#endif
1528 break;
1529 case DRM_FORMAT_XRGB1555:
1530 fb_format =
1531 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1532 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1533#ifdef __BIG_ENDIAN
1534 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1535#endif
1536 break;
1537 case DRM_FORMAT_RGB565:
1538 fb_format =
1539 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1540 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1541#ifdef __BIG_ENDIAN
1542 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1543#endif
1544 break;
1545 case DRM_FORMAT_XRGB8888:
1546 case DRM_FORMAT_ARGB8888:
1547 fb_format =
1548 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1549 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1550#ifdef __BIG_ENDIAN
1551 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1552#endif
1553 break;
1554 case DRM_FORMAT_XRGB2101010:
1555 case DRM_FORMAT_ARGB2101010:
1556 fb_format =
1557 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1558 AVIVO_D1GRPH_CONTROL_32BPP_ARGB2101010;
1559#ifdef __BIG_ENDIAN
1560 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1561#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
1563 bypass_lut = true;
1564 break;
1565 default:
1566 format_name = drm_get_format_name(target_fb->pixel_format);
1567 DRM_ERROR("Unsupported screen format %s\n", format_name);
1568 kfree(format_name);
1569 return -EINVAL;
1570 }
1571
1572 if (rdev->family >= CHIP_R600) {
1573 if (tiling_flags & RADEON_TILING_MACRO)
1574 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1575 else if (tiling_flags & RADEON_TILING_MICRO)
1576 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1577 } else {
1578 if (tiling_flags & RADEON_TILING_MACRO)
1579 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1580
1581 if (tiling_flags & RADEON_TILING_MICRO)
1582 fb_format |= AVIVO_D1GRPH_TILED;
1583 }
1584
1585 if (radeon_crtc->crtc_id == 0)
1586 WREG32(AVIVO_D1VGA_CONTROL, 0);
1587 else
1588 WREG32(AVIVO_D2VGA_CONTROL, 0);
1589
	/* Make sure surface address is updated at vertical blank rather than
	 * horizontal blank
	 */
1593 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1594
1595 if (rdev->family >= CHIP_RV770) {
1596 if (radeon_crtc->crtc_id) {
1597 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1598 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1599 } else {
1600 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1601 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1602 }
1603 }
1604 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1605 (u32) fb_location);
1606 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1607 radeon_crtc->crtc_offset, (u32) fb_location);
1608 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1609 if (rdev->family >= CHIP_R600)
1610 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1611
	/* LUT only has 256 slots for 8 bpc fb. Bypass for > 8 bpc scanout for precision */
1613 WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset,
1614 (bypass_lut ? AVIVO_LUT_10BIT_BYPASS_EN : 0), ~AVIVO_LUT_10BIT_BYPASS_EN);
1615
1616 if (bypass_lut)
1617 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1618
1619 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1620 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1621 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1622 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1623 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1624 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1625
1626 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1627 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1628 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1629
1630 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1631 target_fb->height);
1632 x &= ~3;
1633 y &= ~1;
1634 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1635 (x << 16) | y);
1636 viewport_w = crtc->mode.hdisplay;
1637 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1638 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1639 (viewport_w << 16) | viewport_h);
1640
	/* set pageflip to happen only at start of vblank interval (front porch) */
1642 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 3);
1643
1644 if (!atomic && fb && fb != crtc->primary->fb) {
1645 radeon_fb = to_radeon_framebuffer(fb);
1646 rbo = gem_to_radeon_bo(radeon_fb->obj);
1647 r = radeon_bo_reserve(rbo, false);
1648 if (unlikely(r != 0))
1649 return r;
1650 radeon_bo_unpin(rbo);
1651 radeon_bo_unreserve(rbo);
1652 }
1653
	/* Bytes per pixel may have changed */
1655 radeon_bandwidth_update(rdev);
1656
1657 return 0;
1658}
1659
1660int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1661 struct drm_framebuffer *old_fb)
1662{
1663 struct drm_device *dev = crtc->dev;
1664 struct radeon_device *rdev = dev->dev_private;
1665
1666 if (ASIC_IS_DCE4(rdev))
1667 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1668 else if (ASIC_IS_AVIVO(rdev))
1669 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1670 else
1671 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1672}
1673
1674int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1675 struct drm_framebuffer *fb,
1676 int x, int y, enum mode_set_atomic state)
1677{
1678 struct drm_device *dev = crtc->dev;
1679 struct radeon_device *rdev = dev->dev_private;
1680
1681 if (ASIC_IS_DCE4(rdev))
1682 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1683 else if (ASIC_IS_AVIVO(rdev))
1684 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1685 else
1686 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1687}
1688
/* properly set additional regs when using atombios */
1690static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1691{
1692 struct drm_device *dev = crtc->dev;
1693 struct radeon_device *rdev = dev->dev_private;
1694 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1695 u32 disp_merge_cntl;
1696
1697 switch (radeon_crtc->crtc_id) {
1698 case 0:
1699 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1700 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1701 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1702 break;
1703 case 1:
1704 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1705 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1706 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1707 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1708 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1709 break;
1710 }
1711}
1712
/**
 * radeon_get_pll_use_mask - look up a mask of which pplls are in use
 *
 * @crtc: drm crtc
 *
 * Returns the mask of which PPLLs (Pixel PLLs) are in use.
 */
1720static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1721{
1722 struct drm_device *dev = crtc->dev;
1723 struct drm_crtc *test_crtc;
1724 struct radeon_crtc *test_radeon_crtc;
1725 u32 pll_in_use = 0;
1726
1727 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1728 if (crtc == test_crtc)
1729 continue;
1730
1731 test_radeon_crtc = to_radeon_crtc(test_crtc);
1732 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1733 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1734 }
1735 return pll_in_use;
1736}
1737
/**
 * radeon_get_shared_dp_ppll - return the PPLL used by another crtc for DP
 *
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) used by another crtc/encoder which is
 * also in DP mode.  For DP, a single PPLL can be used for all DP
 * crtcs/encoders.
 */
1747static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1748{
1749 struct drm_device *dev = crtc->dev;
1750 struct radeon_device *rdev = dev->dev_private;
1751 struct drm_crtc *test_crtc;
1752 struct radeon_crtc *test_radeon_crtc;
1753
1754 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1755 if (crtc == test_crtc)
1756 continue;
1757 test_radeon_crtc = to_radeon_crtc(test_crtc);
1758 if (test_radeon_crtc->encoder &&
1759 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
			/* PPLL2 is exclusively available to UNIPHYA on DCE61 */
1761 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1762 test_radeon_crtc->pll_id == ATOM_PPLL2)
1763 continue;
			/* for DP use the same PLL for all */
1765 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1766 return test_radeon_crtc->pll_id;
1767 }
1768 }
1769 return ATOM_PPLL_INVALID;
1770}
1771
/**
 * radeon_get_shared_nondp_ppll - return the PPLL used by another non-DP crtc
 *
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) used by another non-DP crtc/encoder which
 * can be shared (i.e., same clock).
 */
1781static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
1782{
1783 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1784 struct drm_device *dev = crtc->dev;
1785 struct radeon_device *rdev = dev->dev_private;
1786 struct drm_crtc *test_crtc;
1787 struct radeon_crtc *test_radeon_crtc;
1788 u32 adjusted_clock, test_adjusted_clock;
1789
1790 adjusted_clock = radeon_crtc->adjusted_clock;
1791
1792 if (adjusted_clock == 0)
1793 return ATOM_PPLL_INVALID;
1794
1795 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1796 if (crtc == test_crtc)
1797 continue;
1798 test_radeon_crtc = to_radeon_crtc(test_crtc);
1799 if (test_radeon_crtc->encoder &&
1800 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
			/* PPLL2 is exclusively available to UNIPHYA on DCE61 */
1802 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1803 test_radeon_crtc->pll_id == ATOM_PPLL2)
1804 continue;
			/* check if we are already driving this connector with another crtc */
1806 if (test_radeon_crtc->connector == radeon_crtc->connector) {
1807
1808 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1809 return test_radeon_crtc->pll_id;
1810 }
1811
1812 test_adjusted_clock = test_radeon_crtc->adjusted_clock;
1813 if ((crtc->mode.clock == test_crtc->mode.clock) &&
1814 (adjusted_clock == test_adjusted_clock) &&
1815 (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
1816 (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
1817 return test_radeon_crtc->pll_id;
1818 }
1819 }
1820 return ATOM_PPLL_INVALID;
1821}
1822
1823
1824
1825
1826
1827
1828
1829
1830
1831
1832
1833
1834
1835
1836
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
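/*
 * radeon_atom_pick_pll - allocate a PPLL for the crtc
 *
 * Picks a PPLL based on the ASIC generation and the encoder type:
 * - DCE8: DP shares a PPLL across crtcs, or skips PLL programming entirely
 *   when an external DP reference clock is present; Kabini/Mullins then pick
 *   between PPLL2/PPLL1, other DCE8 parts can also fall back to PPLL0.
 * - DCE6.1: UNIPHY link A gets PPLL2; everything else shares where possible
 *   and otherwise picks from PPLL0/PPLL1.
 * - DCE4.1: PPLL1/PPLL2 only, no sharing.
 * - DCE4/5/6: DP uses the external clock, PPLL0 (DCE6) or the DCPLL (DCE5);
 *   otherwise PPLL1/PPLL2 are shared where possible.
 * - Older ASICs: the crtc id is used as the PLL id directly.
 *
 * Returns the PPLL id, or ATOM_PPLL_INVALID when no PLL programming is
 * needed or no PLL could be allocated.
 */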
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	u32 pll_in_use;
	int pll;

	if (ASIC_IS_DCE8(rdev)) {
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

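		/* Kabini/Mullins are limited to PPLL1/PPLL2 here;
		 * other DCE8 parts may also fall back to PPLL0
		 */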
		if ((rdev->family == CHIP_KABINI) ||
		    (rdev->family == CHIP_MULLINS)) {
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		} else {
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			if (!(pll_in_use & (1 << ATOM_PPLL0)))
				return ATOM_PPLL0;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		}
	} else if (ASIC_IS_DCE61(rdev)) {
		struct radeon_encoder_atom_dig *dig =
			radeon_encoder->enc_priv;

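		/* on DCE6.1, UNIPHY link A always uses PPLL2 */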
		if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
		    (dig->linkb == false))
			return ATOM_PPLL2;
		else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL0)))
			return ATOM_PPLL0;
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE41(rdev)) {
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
		}
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE4(rdev)) {
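		/* For DP, prefer a clock source that does not consume a
		 * shared PPLL: the external reference clock if the board
		 * provides one, PPLL0 on DCE6, or the DCPLL on DCE5.
		 * Otherwise share a PPLL with another DP crtc if possible.
		 */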
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				return ATOM_PPLL_INVALID;
			else if (ASIC_IS_DCE6(rdev))
				return ATOM_PPLL0;
			else if (ASIC_IS_DCE5(rdev))
				return ATOM_DCPLL;
			else {
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}

		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else {
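		/* older ASICs: the crtc id maps directly onto the PLL id
		 * (PPLL1 for crtc 0, PPLL2 for crtc 1)
		 */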
		return radeon_crtc->crtc_id;
	}
}

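/*
 * radeon_atom_disp_eng_pll_init - program the display engine clock at init
 *
 * Sets the display engine PLL to the default display clock.  On DCE4/5 the
 * DCPLL spread spectrum is disabled around the programming and re-enabled
 * afterwards if the ASIC SS table requests it.
 */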
void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
{
	if (ASIC_IS_DCE6(rdev))
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
	else if (ASIC_IS_DCE4(rdev)) {
		struct radeon_atom_ss ss;
		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
								   ASIC_INTERNAL_SS_ON_DCPLL,
								   rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);

		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
	}
}

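/*
 * atombios_crtc_mode_set - program the crtc for the requested mode
 *
 * Programs the pixel PLL, the crtc timing (using the DTD and/or CRTC timing
 * tables depending on the ASIC and whether a TV/CV encoder is active), the
 * scanout base, overscan and scaler setup, and resets the cursor.  Fails
 * with -EINVAL if no adjusted clock was computed during mode_fixup.
 */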
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	bool is_tvcv = false;

	if (radeon_encoder->active_device &
	    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
		is_tvcv = true;

	if (!radeon_crtc->adjusted_clock)
		return -EINVAL;

	atombios_crtc_set_pll(crtc, adjusted_mode);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	radeon_cursor_reset(crtc);

	radeon_crtc->hw_mode = *adjusted_mode;

	return 0;
}

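/*
 * atombios_crtc_mode_fixup - validate and prepare the crtc for a mode
 *
 * Caches the encoder/connector driving this crtc, applies the scaler mode
 * fixup, computes the adjusted PLL parameters and picks a PPLL.  Returns
 * false if no encoder is attached, the scaling or PLL preparation fails, or
 * a PPLL cannot be allocated for a non-DP encoder.
 */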
static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     const struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_crtc->encoder = encoder;
			radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
			break;
		}
	}
	if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
		radeon_crtc->encoder = NULL;
		radeon_crtc->connector = NULL;
		return false;
	}
	if (radeon_crtc->encoder) {
		struct radeon_encoder *radeon_encoder =
			to_radeon_encoder(radeon_crtc->encoder);

		radeon_crtc->output_csc = radeon_encoder->output_csc;
	}
	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
		return false;

	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
	    !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
		return false;

	return true;
}

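/*
 * atombios_crtc_prepare - get the crtc ready for mode programming
 *
 * Disables crtc power gating on DCE6, locks the crtc registers and turns
 * the crtc off before it is reprogrammed.
 */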
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_DISABLE);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

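/*
 * atombios_crtc_disable - shut down a crtc
 *
 * Turns the crtc off, unpins the scanout buffer, disables the graphics
 * surface, power gates the crtc on DCE6, and tears down the PPLL unless
 * another enabled crtc is still using it.
 */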
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_atom_ss ss;
	int i;

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct radeon_framebuffer *radeon_fb;
		struct radeon_bo *rbo;

		radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
		rbo = gem_to_radeon_bo(radeon_fb->obj);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve rbo before unpin\n");
		else {
			radeon_bo_unpin(rbo);
			radeon_bo_unreserve(rbo);
		}
	}

	if (ASIC_IS_DCE4(rdev))
		WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
	else if (ASIC_IS_AVIVO(rdev))
		WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0);

	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_ENABLE);

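	/* don't tear the PLL down if another enabled crtc is still using it */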
	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i] &&
		    rdev->mode_info.crtcs[i]->enabled &&
		    i != radeon_crtc->crtc_id &&
		    radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
			goto done;
		}
	}

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	case ATOM_PPLL0:
		if ((rdev->family == CHIP_ARUBA) ||
		    (rdev->family == CHIP_KAVERI) ||
		    (rdev->family == CHIP_BONAIRE) ||
		    (rdev->family == CHIP_HAWAII))
			atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
						  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	default:
		break;
	}
done:
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.mode_set_base_atomic = atombios_crtc_set_base_atomic,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
	.disable = atombios_crtc_disable,
};

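/*
 * radeon_atombios_init_crtc - per-crtc AtomBIOS setup
 *
 * Sets the register offset for the crtc (per-crtc block on DCE4+, D1/D2
 * offset on AVIVO), clears the cached PLL/encoder/connector state and
 * registers the AtomBIOS crtc helper functions.
 */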
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}