1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26#include <drm/drmP.h>
27#include <drm/drm_crtc_helper.h>
28#include <drm/drm_fb_helper.h>
29#include <drm/radeon_drm.h>
30#include <drm/drm_fixed.h>
31#include "radeon.h"
32#include "atom.h"
33#include "atom-bits.h"
34
35static void atombios_overscan_setup(struct drm_crtc *crtc,
36 struct drm_display_mode *mode,
37 struct drm_display_mode *adjusted_mode)
38{
39 struct drm_device *dev = crtc->dev;
40 struct radeon_device *rdev = dev->dev_private;
41 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
42 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
43 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
44 int a1, a2;
45
46 memset(&args, 0, sizeof(args));
47
48 args.ucCRTC = radeon_crtc->crtc_id;
49
50 switch (radeon_crtc->rmx_type) {
51 case RMX_CENTER:
52 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
54 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
56 break;
57 case RMX_ASPECT:
58 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
59 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
60
61 if (a1 > a2) {
62 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
64 } else if (a2 > a1) {
65 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
67 }
68 break;
69 case RMX_FULL:
70 default:
71 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
73 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
74 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
75 break;
76 }
77 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
78}
79
80static void atombios_scaler_setup(struct drm_crtc *crtc)
81{
82 struct drm_device *dev = crtc->dev;
83 struct radeon_device *rdev = dev->dev_private;
84 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
85 ENABLE_SCALER_PS_ALLOCATION args;
86 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
87 struct radeon_encoder *radeon_encoder =
88 to_radeon_encoder(radeon_crtc->encoder);
89
90 enum radeon_tv_std tv_std = TV_STD_NTSC;
91 bool is_tv = false, is_cv = false;
92
93 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
94 return;
95
96 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
97 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
98 tv_std = tv_dac->tv_std;
99 is_tv = true;
100 }
101
102 memset(&args, 0, sizeof(args));
103
104 args.ucScaler = radeon_crtc->crtc_id;
105
106 if (is_tv) {
107 switch (tv_std) {
108 case TV_STD_NTSC:
109 default:
110 args.ucTVStandard = ATOM_TV_NTSC;
111 break;
112 case TV_STD_PAL:
113 args.ucTVStandard = ATOM_TV_PAL;
114 break;
115 case TV_STD_PAL_M:
116 args.ucTVStandard = ATOM_TV_PALM;
117 break;
118 case TV_STD_PAL_60:
119 args.ucTVStandard = ATOM_TV_PAL60;
120 break;
121 case TV_STD_NTSC_J:
122 args.ucTVStandard = ATOM_TV_NTSCJ;
123 break;
124 case TV_STD_SCART_PAL:
125 args.ucTVStandard = ATOM_TV_PAL;
126 break;
127 case TV_STD_SECAM:
128 args.ucTVStandard = ATOM_TV_SECAM;
129 break;
130 case TV_STD_PAL_CN:
131 args.ucTVStandard = ATOM_TV_PALCN;
132 break;
133 }
134 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
135 } else if (is_cv) {
136 args.ucTVStandard = ATOM_TV_CV;
137 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
138 } else {
139 switch (radeon_crtc->rmx_type) {
140 case RMX_FULL:
141 args.ucEnable = ATOM_SCALER_EXPANSION;
142 break;
143 case RMX_CENTER:
144 args.ucEnable = ATOM_SCALER_CENTER;
145 break;
146 case RMX_ASPECT:
147 args.ucEnable = ATOM_SCALER_EXPANSION;
148 break;
149 default:
150 if (ASIC_IS_AVIVO(rdev))
151 args.ucEnable = ATOM_SCALER_DISABLE;
152 else
153 args.ucEnable = ATOM_SCALER_CENTER;
154 break;
155 }
156 }
157 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
158 if ((is_tv || is_cv)
159 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
160 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
161 }
162}
163
164static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
165{
166 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
167 struct drm_device *dev = crtc->dev;
168 struct radeon_device *rdev = dev->dev_private;
169 int index =
170 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
171 ENABLE_CRTC_PS_ALLOCATION args;
172
173 memset(&args, 0, sizeof(args));
174
175 args.ucCRTC = radeon_crtc->crtc_id;
176 args.ucEnable = lock;
177
178 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
179}
180
181static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
182{
183 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
184 struct drm_device *dev = crtc->dev;
185 struct radeon_device *rdev = dev->dev_private;
186 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
187 ENABLE_CRTC_PS_ALLOCATION args;
188
189 memset(&args, 0, sizeof(args));
190
191 args.ucCRTC = radeon_crtc->crtc_id;
192 args.ucEnable = state;
193
194 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
195}
196
197static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
198{
199 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
200 struct drm_device *dev = crtc->dev;
201 struct radeon_device *rdev = dev->dev_private;
202 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
203 ENABLE_CRTC_PS_ALLOCATION args;
204
205 memset(&args, 0, sizeof(args));
206
207 args.ucCRTC = radeon_crtc->crtc_id;
208 args.ucEnable = state;
209
210 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
211}
212
213static const u32 vga_control_regs[6] =
214{
215 AVIVO_D1VGA_CONTROL,
216 AVIVO_D2VGA_CONTROL,
217 EVERGREEN_D3VGA_CONTROL,
218 EVERGREEN_D4VGA_CONTROL,
219 EVERGREEN_D5VGA_CONTROL,
220 EVERGREEN_D6VGA_CONTROL,
221};
222
223static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
224{
225 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
226 struct drm_device *dev = crtc->dev;
227 struct radeon_device *rdev = dev->dev_private;
228 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
229 BLANK_CRTC_PS_ALLOCATION args;
230 u32 vga_control = 0;
231
232 memset(&args, 0, sizeof(args));
233
234 if (ASIC_IS_DCE8(rdev)) {
235 vga_control = RREG32(vga_control_regs[radeon_crtc->crtc_id]);
236 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control | 1);
237 }
238
239 args.ucCRTC = radeon_crtc->crtc_id;
240 args.ucBlanking = state;
241
242 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
243
244 if (ASIC_IS_DCE8(rdev)) {
245 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control);
246 }
247}
248
249static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
250{
251 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
252 struct drm_device *dev = crtc->dev;
253 struct radeon_device *rdev = dev->dev_private;
254 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
255 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
256
257 memset(&args, 0, sizeof(args));
258
259 args.ucDispPipeId = radeon_crtc->crtc_id;
260 args.ucEnable = state;
261
262 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
263}
264
265void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
266{
267 struct drm_device *dev = crtc->dev;
268 struct radeon_device *rdev = dev->dev_private;
269 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
270
271 switch (mode) {
272 case DRM_MODE_DPMS_ON:
273 radeon_crtc->enabled = true;
274 atombios_enable_crtc(crtc, ATOM_ENABLE);
275 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
276 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
277 atombios_blank_crtc(crtc, ATOM_DISABLE);
278 if (dev->num_crtcs > radeon_crtc->crtc_id)
279 drm_crtc_vblank_on(crtc);
280 radeon_crtc_load_lut(crtc);
281 break;
282 case DRM_MODE_DPMS_STANDBY:
283 case DRM_MODE_DPMS_SUSPEND:
284 case DRM_MODE_DPMS_OFF:
285 if (dev->num_crtcs > radeon_crtc->crtc_id)
286 drm_crtc_vblank_off(crtc);
287 if (radeon_crtc->enabled)
288 atombios_blank_crtc(crtc, ATOM_ENABLE);
289 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
290 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
291 atombios_enable_crtc(crtc, ATOM_DISABLE);
292 radeon_crtc->enabled = false;
293 break;
294 }
295
296 radeon_pm_compute_clocks(rdev);
297}
298
299static void
300atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
301 struct drm_display_mode *mode)
302{
303 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
304 struct drm_device *dev = crtc->dev;
305 struct radeon_device *rdev = dev->dev_private;
306 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
307 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
308 u16 misc = 0;
309
310 memset(&args, 0, sizeof(args));
311 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
312 args.usH_Blanking_Time =
313 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
314 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
315 args.usV_Blanking_Time =
316 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
317 args.usH_SyncOffset =
318 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
319 args.usH_SyncWidth =
320 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
321 args.usV_SyncOffset =
322 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
323 args.usV_SyncWidth =
324 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
325 args.ucH_Border = radeon_crtc->h_border;
326 args.ucV_Border = radeon_crtc->v_border;
327
328 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
329 misc |= ATOM_VSYNC_POLARITY;
330 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
331 misc |= ATOM_HSYNC_POLARITY;
332 if (mode->flags & DRM_MODE_FLAG_CSYNC)
333 misc |= ATOM_COMPOSITESYNC;
334 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
335 misc |= ATOM_INTERLACE;
336 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
337 misc |= ATOM_DOUBLE_CLOCK_MODE;
338 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
339 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
340
341 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
342 args.ucCRTC = radeon_crtc->crtc_id;
343
344 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
345}
346
347static void atombios_crtc_set_timing(struct drm_crtc *crtc,
348 struct drm_display_mode *mode)
349{
350 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
351 struct drm_device *dev = crtc->dev;
352 struct radeon_device *rdev = dev->dev_private;
353 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
354 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
355 u16 misc = 0;
356
357 memset(&args, 0, sizeof(args));
358 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
359 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
360 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
361 args.usH_SyncWidth =
362 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
363 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
364 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
365 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
366 args.usV_SyncWidth =
367 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
368
369 args.ucOverscanRight = radeon_crtc->h_border;
370 args.ucOverscanLeft = radeon_crtc->h_border;
371 args.ucOverscanBottom = radeon_crtc->v_border;
372 args.ucOverscanTop = radeon_crtc->v_border;
373
374 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
375 misc |= ATOM_VSYNC_POLARITY;
376 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
377 misc |= ATOM_HSYNC_POLARITY;
378 if (mode->flags & DRM_MODE_FLAG_CSYNC)
379 misc |= ATOM_COMPOSITESYNC;
380 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
381 misc |= ATOM_INTERLACE;
382 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
383 misc |= ATOM_DOUBLE_CLOCK_MODE;
384 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
385 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
386
387 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
388 args.ucCRTC = radeon_crtc->crtc_id;
389
390 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
391}
392
393static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
394{
395 u32 ss_cntl;
396
397 if (ASIC_IS_DCE4(rdev)) {
398 switch (pll_id) {
399 case ATOM_PPLL1:
400 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
401 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
402 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
403 break;
404 case ATOM_PPLL2:
405 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
406 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
407 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
408 break;
409 case ATOM_DCPLL:
410 case ATOM_PPLL_INVALID:
411 return;
412 }
413 } else if (ASIC_IS_AVIVO(rdev)) {
414 switch (pll_id) {
415 case ATOM_PPLL1:
416 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
417 ss_cntl &= ~1;
418 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
419 break;
420 case ATOM_PPLL2:
421 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
422 ss_cntl &= ~1;
423 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
424 break;
425 case ATOM_DCPLL:
426 case ATOM_PPLL_INVALID:
427 return;
428 }
429 }
430}
431
432
433union atom_enable_ss {
434 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
435 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
436 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
437 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
438 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
439};
440
441static void atombios_crtc_program_ss(struct radeon_device *rdev,
442 int enable,
443 int pll_id,
444 int crtc_id,
445 struct radeon_atom_ss *ss)
446{
447 unsigned i;
448 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
449 union atom_enable_ss args;
450
451 if (enable) {
452
453
454
455
456
457 if (ss->percentage == 0)
458 return;
459 if (ss->type & ATOM_EXTERNAL_SS_MASK)
460 return;
461 } else {
462 for (i = 0; i < rdev->num_crtc; i++) {
463 if (rdev->mode_info.crtcs[i] &&
464 rdev->mode_info.crtcs[i]->enabled &&
465 i != crtc_id &&
466 pll_id == rdev->mode_info.crtcs[i]->pll_id) {
467
468
469
470
471 return;
472 }
473 }
474 }
475
476 memset(&args, 0, sizeof(args));
477
478 if (ASIC_IS_DCE5(rdev)) {
479 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
480 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
481 switch (pll_id) {
482 case ATOM_PPLL1:
483 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
484 break;
485 case ATOM_PPLL2:
486 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
487 break;
488 case ATOM_DCPLL:
489 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
490 break;
491 case ATOM_PPLL_INVALID:
492 return;
493 }
494 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
495 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
496 args.v3.ucEnable = enable;
497 } else if (ASIC_IS_DCE4(rdev)) {
498 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
499 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
500 switch (pll_id) {
501 case ATOM_PPLL1:
502 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
503 break;
504 case ATOM_PPLL2:
505 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
506 break;
507 case ATOM_DCPLL:
508 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
509 break;
510 case ATOM_PPLL_INVALID:
511 return;
512 }
513 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
514 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
515 args.v2.ucEnable = enable;
516 } else if (ASIC_IS_DCE3(rdev)) {
517 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
518 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
519 args.v1.ucSpreadSpectrumStep = ss->step;
520 args.v1.ucSpreadSpectrumDelay = ss->delay;
521 args.v1.ucSpreadSpectrumRange = ss->range;
522 args.v1.ucPpll = pll_id;
523 args.v1.ucEnable = enable;
524 } else if (ASIC_IS_AVIVO(rdev)) {
525 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
526 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
527 atombios_disable_ss(rdev, pll_id);
528 return;
529 }
530 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
531 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
532 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
533 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
534 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
535 args.lvds_ss_2.ucEnable = enable;
536 } else {
537 if (enable == ATOM_DISABLE) {
538 atombios_disable_ss(rdev, pll_id);
539 return;
540 }
541 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
542 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
543 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
544 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
545 args.lvds_ss.ucEnable = enable;
546 }
547 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
548}
549
550union adjust_pixel_clock {
551 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
552 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
553};
554
555static u32 atombios_adjust_pll(struct drm_crtc *crtc,
556 struct drm_display_mode *mode)
557{
558 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
559 struct drm_device *dev = crtc->dev;
560 struct radeon_device *rdev = dev->dev_private;
561 struct drm_encoder *encoder = radeon_crtc->encoder;
562 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
563 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
564 u32 adjusted_clock = mode->clock;
565 int encoder_mode = atombios_get_encoder_mode(encoder);
566 u32 dp_clock = mode->clock;
567 u32 clock = mode->clock;
568 int bpc = radeon_crtc->bpc;
569 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
570
571
572 radeon_crtc->pll_flags = 0;
573
574 if (ASIC_IS_AVIVO(rdev)) {
575 if ((rdev->family == CHIP_RS600) ||
576 (rdev->family == CHIP_RS690) ||
577 (rdev->family == CHIP_RS740))
578 radeon_crtc->pll_flags |= (
579 RADEON_PLL_PREFER_CLOSEST_LOWER);
580
581 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)
582 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
583 else
584 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
585
586 if (rdev->family < CHIP_RV770)
587 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
588
589 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
590 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
591
592 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
593 && !radeon_crtc->ss_enabled)
594 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
595 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000)
596 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
597 } else {
598 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
599
600 if (mode->clock > 200000)
601 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
602 else
603 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
604 }
605
606 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
607 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
608 if (connector) {
609 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
610 struct radeon_connector_atom_dig *dig_connector =
611 radeon_connector->con_priv;
612
613 dp_clock = dig_connector->dp_clock;
614 }
615 }
616
617 if (radeon_encoder->is_mst_encoder) {
618 struct radeon_encoder_mst *mst_enc = radeon_encoder->enc_priv;
619 struct radeon_connector_atom_dig *dig_connector = mst_enc->connector->con_priv;
620
621 dp_clock = dig_connector->dp_clock;
622 }
623
624
625 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
626 if (radeon_crtc->ss_enabled) {
627 if (radeon_crtc->ss.refdiv) {
628 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
629 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
630 if (ASIC_IS_AVIVO(rdev) &&
631 rdev->family != CHIP_RS780 &&
632 rdev->family != CHIP_RS880)
633 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
634 }
635 }
636 }
637
638 if (ASIC_IS_AVIVO(rdev)) {
639
640 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
641 adjusted_clock = mode->clock * 2;
642 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
643 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
644 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
645 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
646 } else {
647 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
648 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
649 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
650 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
651 }
652
653
654 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
655 switch (bpc) {
656 case 8:
657 default:
658 break;
659 case 10:
660 clock = (clock * 5) / 4;
661 break;
662 case 12:
663 clock = (clock * 3) / 2;
664 break;
665 case 16:
666 clock = clock * 2;
667 break;
668 }
669 }
670
671
672
673
674
675 if (ASIC_IS_DCE3(rdev)) {
676 union adjust_pixel_clock args;
677 u8 frev, crev;
678 int index;
679
680 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
681 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
682 &crev))
683 return adjusted_clock;
684
685 memset(&args, 0, sizeof(args));
686
687 switch (frev) {
688 case 1:
689 switch (crev) {
690 case 1:
691 case 2:
692 args.v1.usPixelClock = cpu_to_le16(clock / 10);
693 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
694 args.v1.ucEncodeMode = encoder_mode;
695 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
696 args.v1.ucConfig |=
697 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
698
699 atom_execute_table(rdev->mode_info.atom_context,
700 index, (uint32_t *)&args);
701 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
702 break;
703 case 3:
704 args.v3.sInput.usPixelClock = cpu_to_le16(clock / 10);
705 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
706 args.v3.sInput.ucEncodeMode = encoder_mode;
707 args.v3.sInput.ucDispPllConfig = 0;
708 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
709 args.v3.sInput.ucDispPllConfig |=
710 DISPPLL_CONFIG_SS_ENABLE;
711 if (ENCODER_MODE_IS_DP(encoder_mode)) {
712 args.v3.sInput.ucDispPllConfig |=
713 DISPPLL_CONFIG_COHERENT_MODE;
714
715 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
716 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
717 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
718 if (dig->coherent_mode)
719 args.v3.sInput.ucDispPllConfig |=
720 DISPPLL_CONFIG_COHERENT_MODE;
721 if (is_duallink)
722 args.v3.sInput.ucDispPllConfig |=
723 DISPPLL_CONFIG_DUAL_LINK;
724 }
725 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
726 ENCODER_OBJECT_ID_NONE)
727 args.v3.sInput.ucExtTransmitterID =
728 radeon_encoder_get_dp_bridge_encoder_id(encoder);
729 else
730 args.v3.sInput.ucExtTransmitterID = 0;
731
732 atom_execute_table(rdev->mode_info.atom_context,
733 index, (uint32_t *)&args);
734 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
735 if (args.v3.sOutput.ucRefDiv) {
736 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
737 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
738 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
739 }
740 if (args.v3.sOutput.ucPostDiv) {
741 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
742 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
743 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
744 }
745 break;
746 default:
747 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
748 return adjusted_clock;
749 }
750 break;
751 default:
752 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
753 return adjusted_clock;
754 }
755 }
756 return adjusted_clock;
757}
758
759union set_pixel_clock {
760 SET_PIXEL_CLOCK_PS_ALLOCATION base;
761 PIXEL_CLOCK_PARAMETERS v1;
762 PIXEL_CLOCK_PARAMETERS_V2 v2;
763 PIXEL_CLOCK_PARAMETERS_V3 v3;
764 PIXEL_CLOCK_PARAMETERS_V5 v5;
765 PIXEL_CLOCK_PARAMETERS_V6 v6;
766};
767
768
769
770
771static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
772 u32 dispclk)
773{
774 u8 frev, crev;
775 int index;
776 union set_pixel_clock args;
777
778 memset(&args, 0, sizeof(args));
779
780 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
781 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
782 &crev))
783 return;
784
785 switch (frev) {
786 case 1:
787 switch (crev) {
788 case 5:
789
790
791
792 args.v5.ucCRTC = ATOM_CRTC_INVALID;
793 args.v5.usPixelClock = cpu_to_le16(dispclk);
794 args.v5.ucPpll = ATOM_DCPLL;
795 break;
796 case 6:
797
798
799
800 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
801 if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
802 args.v6.ucPpll = ATOM_EXT_PLL1;
803 else if (ASIC_IS_DCE6(rdev))
804 args.v6.ucPpll = ATOM_PPLL0;
805 else
806 args.v6.ucPpll = ATOM_DCPLL;
807 break;
808 default:
809 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
810 return;
811 }
812 break;
813 default:
814 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
815 return;
816 }
817 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
818}
819
820static void atombios_crtc_program_pll(struct drm_crtc *crtc,
821 u32 crtc_id,
822 int pll_id,
823 u32 encoder_mode,
824 u32 encoder_id,
825 u32 clock,
826 u32 ref_div,
827 u32 fb_div,
828 u32 frac_fb_div,
829 u32 post_div,
830 int bpc,
831 bool ss_enabled,
832 struct radeon_atom_ss *ss)
833{
834 struct drm_device *dev = crtc->dev;
835 struct radeon_device *rdev = dev->dev_private;
836 u8 frev, crev;
837 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
838 union set_pixel_clock args;
839
840 memset(&args, 0, sizeof(args));
841
842 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
843 &crev))
844 return;
845
846 switch (frev) {
847 case 1:
848 switch (crev) {
849 case 1:
850 if (clock == ATOM_DISABLE)
851 return;
852 args.v1.usPixelClock = cpu_to_le16(clock / 10);
853 args.v1.usRefDiv = cpu_to_le16(ref_div);
854 args.v1.usFbDiv = cpu_to_le16(fb_div);
855 args.v1.ucFracFbDiv = frac_fb_div;
856 args.v1.ucPostDiv = post_div;
857 args.v1.ucPpll = pll_id;
858 args.v1.ucCRTC = crtc_id;
859 args.v1.ucRefDivSrc = 1;
860 break;
861 case 2:
862 args.v2.usPixelClock = cpu_to_le16(clock / 10);
863 args.v2.usRefDiv = cpu_to_le16(ref_div);
864 args.v2.usFbDiv = cpu_to_le16(fb_div);
865 args.v2.ucFracFbDiv = frac_fb_div;
866 args.v2.ucPostDiv = post_div;
867 args.v2.ucPpll = pll_id;
868 args.v2.ucCRTC = crtc_id;
869 args.v2.ucRefDivSrc = 1;
870 break;
871 case 3:
872 args.v3.usPixelClock = cpu_to_le16(clock / 10);
873 args.v3.usRefDiv = cpu_to_le16(ref_div);
874 args.v3.usFbDiv = cpu_to_le16(fb_div);
875 args.v3.ucFracFbDiv = frac_fb_div;
876 args.v3.ucPostDiv = post_div;
877 args.v3.ucPpll = pll_id;
878 if (crtc_id == ATOM_CRTC2)
879 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
880 else
881 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
882 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
883 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
884 args.v3.ucTransmitterId = encoder_id;
885 args.v3.ucEncoderMode = encoder_mode;
886 break;
887 case 5:
888 args.v5.ucCRTC = crtc_id;
889 args.v5.usPixelClock = cpu_to_le16(clock / 10);
890 args.v5.ucRefDiv = ref_div;
891 args.v5.usFbDiv = cpu_to_le16(fb_div);
892 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
893 args.v5.ucPostDiv = post_div;
894 args.v5.ucMiscInfo = 0;
895 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
896 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
897 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
898 switch (bpc) {
899 case 8:
900 default:
901 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
902 break;
903 case 10:
904
905 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_32BPP;
906 break;
907 case 12:
908
909 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
910 break;
911 }
912 }
913 args.v5.ucTransmitterID = encoder_id;
914 args.v5.ucEncoderMode = encoder_mode;
915 args.v5.ucPpll = pll_id;
916 break;
917 case 6:
918 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
919 args.v6.ucRefDiv = ref_div;
920 args.v6.usFbDiv = cpu_to_le16(fb_div);
921 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
922 args.v6.ucPostDiv = post_div;
923 args.v6.ucMiscInfo = 0;
924 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
925 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
926 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
927 switch (bpc) {
928 case 8:
929 default:
930 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
931 break;
932 case 10:
933 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP_V6;
934 break;
935 case 12:
936 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP_V6;
937 break;
938 case 16:
939 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
940 break;
941 }
942 }
943 args.v6.ucTransmitterID = encoder_id;
944 args.v6.ucEncoderMode = encoder_mode;
945 args.v6.ucPpll = pll_id;
946 break;
947 default:
948 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
949 return;
950 }
951 break;
952 default:
953 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
954 return;
955 }
956
957 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
958}
959
960static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
961{
962 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
963 struct drm_device *dev = crtc->dev;
964 struct radeon_device *rdev = dev->dev_private;
965 struct radeon_encoder *radeon_encoder =
966 to_radeon_encoder(radeon_crtc->encoder);
967 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
968
969 radeon_crtc->bpc = 8;
970 radeon_crtc->ss_enabled = false;
971
972 if (radeon_encoder->is_mst_encoder) {
973 radeon_dp_mst_prepare_pll(crtc, mode);
974 } else if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
975 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
976 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
977 struct drm_connector *connector =
978 radeon_get_connector_for_encoder(radeon_crtc->encoder);
979 struct radeon_connector *radeon_connector =
980 to_radeon_connector(connector);
981 struct radeon_connector_atom_dig *dig_connector =
982 radeon_connector->con_priv;
983 int dp_clock;
984
985
986 radeon_connector->pixelclock_for_modeset = mode->clock;
987 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
988
989 switch (encoder_mode) {
990 case ATOM_ENCODER_MODE_DP_MST:
991 case ATOM_ENCODER_MODE_DP:
992
993 dp_clock = dig_connector->dp_clock / 10;
994 if (ASIC_IS_DCE4(rdev))
995 radeon_crtc->ss_enabled =
996 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
997 ASIC_INTERNAL_SS_ON_DP,
998 dp_clock);
999 else {
1000 if (dp_clock == 16200) {
1001 radeon_crtc->ss_enabled =
1002 radeon_atombios_get_ppll_ss_info(rdev,
1003 &radeon_crtc->ss,
1004 ATOM_DP_SS_ID2);
1005 if (!radeon_crtc->ss_enabled)
1006 radeon_crtc->ss_enabled =
1007 radeon_atombios_get_ppll_ss_info(rdev,
1008 &radeon_crtc->ss,
1009 ATOM_DP_SS_ID1);
1010 } else {
1011 radeon_crtc->ss_enabled =
1012 radeon_atombios_get_ppll_ss_info(rdev,
1013 &radeon_crtc->ss,
1014 ATOM_DP_SS_ID1);
1015 }
1016
1017 radeon_crtc->ss_enabled = false;
1018 }
1019 break;
1020 case ATOM_ENCODER_MODE_LVDS:
1021 if (ASIC_IS_DCE4(rdev))
1022 radeon_crtc->ss_enabled =
1023 radeon_atombios_get_asic_ss_info(rdev,
1024 &radeon_crtc->ss,
1025 dig->lcd_ss_id,
1026 mode->clock / 10);
1027 else
1028 radeon_crtc->ss_enabled =
1029 radeon_atombios_get_ppll_ss_info(rdev,
1030 &radeon_crtc->ss,
1031 dig->lcd_ss_id);
1032 break;
1033 case ATOM_ENCODER_MODE_DVI:
1034 if (ASIC_IS_DCE4(rdev))
1035 radeon_crtc->ss_enabled =
1036 radeon_atombios_get_asic_ss_info(rdev,
1037 &radeon_crtc->ss,
1038 ASIC_INTERNAL_SS_ON_TMDS,
1039 mode->clock / 10);
1040 break;
1041 case ATOM_ENCODER_MODE_HDMI:
1042 if (ASIC_IS_DCE4(rdev))
1043 radeon_crtc->ss_enabled =
1044 radeon_atombios_get_asic_ss_info(rdev,
1045 &radeon_crtc->ss,
1046 ASIC_INTERNAL_SS_ON_HDMI,
1047 mode->clock / 10);
1048 break;
1049 default:
1050 break;
1051 }
1052 }
1053
1054
1055 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
1056
1057 return true;
1058}
1059
1060static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
1061{
1062 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1063 struct drm_device *dev = crtc->dev;
1064 struct radeon_device *rdev = dev->dev_private;
1065 struct radeon_encoder *radeon_encoder =
1066 to_radeon_encoder(radeon_crtc->encoder);
1067 u32 pll_clock = mode->clock;
1068 u32 clock = mode->clock;
1069 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
1070 struct radeon_pll *pll;
1071 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
1072
1073
1074 if (ASIC_IS_DCE5(rdev) &&
1075 (encoder_mode == ATOM_ENCODER_MODE_HDMI) &&
1076 (radeon_crtc->bpc > 8))
1077 clock = radeon_crtc->adjusted_clock;
1078
1079 switch (radeon_crtc->pll_id) {
1080 case ATOM_PPLL1:
1081 pll = &rdev->clock.p1pll;
1082 break;
1083 case ATOM_PPLL2:
1084 pll = &rdev->clock.p2pll;
1085 break;
1086 case ATOM_DCPLL:
1087 case ATOM_PPLL_INVALID:
1088 default:
1089 pll = &rdev->clock.dcpll;
1090 break;
1091 }
1092
1093
1094 pll->flags = radeon_crtc->pll_flags;
1095 pll->reference_div = radeon_crtc->pll_reference_div;
1096 pll->post_div = radeon_crtc->pll_post_div;
1097
1098 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1099
1100 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1101 &fb_div, &frac_fb_div, &ref_div, &post_div);
1102 else if (ASIC_IS_AVIVO(rdev))
1103 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1104 &fb_div, &frac_fb_div, &ref_div, &post_div);
1105 else
1106 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1107 &fb_div, &frac_fb_div, &ref_div, &post_div);
1108
1109 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1110 radeon_crtc->crtc_id, &radeon_crtc->ss);
1111
1112 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1113 encoder_mode, radeon_encoder->encoder_id, clock,
1114 ref_div, fb_div, frac_fb_div, post_div,
1115 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1116
1117 if (radeon_crtc->ss_enabled) {
1118
1119 if (ASIC_IS_DCE4(rdev)) {
1120 u32 step_size;
1121 u32 amount = (((fb_div * 10) + frac_fb_div) *
1122 (u32)radeon_crtc->ss.percentage) /
1123 (100 * (u32)radeon_crtc->ss.percentage_divider);
1124 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1125 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1126 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1127 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1128 step_size = (4 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1129 (125 * 25 * pll->reference_freq / 100);
1130 else
1131 step_size = (2 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1132 (125 * 25 * pll->reference_freq / 100);
1133 radeon_crtc->ss.step = step_size;
1134 }
1135
1136 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1137 radeon_crtc->crtc_id, &radeon_crtc->ss);
1138 }
1139}
1140
1141static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1142 struct drm_framebuffer *fb,
1143 int x, int y, int atomic)
1144{
1145 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1146 struct drm_device *dev = crtc->dev;
1147 struct radeon_device *rdev = dev->dev_private;
1148 struct radeon_framebuffer *radeon_fb;
1149 struct drm_framebuffer *target_fb;
1150 struct drm_gem_object *obj;
1151 struct radeon_bo *rbo;
1152 uint64_t fb_location;
1153 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1154 unsigned bankw, bankh, mtaspect, tile_split;
1155 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1156 u32 tmp, viewport_w, viewport_h;
1157 int r;
1158 bool bypass_lut = false;
1159 struct drm_format_name_buf format_name;
1160
1161
1162 if (!atomic && !crtc->primary->fb) {
1163 DRM_DEBUG_KMS("No FB bound\n");
1164 return 0;
1165 }
1166
1167 if (atomic) {
1168 radeon_fb = to_radeon_framebuffer(fb);
1169 target_fb = fb;
1170 }
1171 else {
1172 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1173 target_fb = crtc->primary->fb;
1174 }
1175
1176
1177
1178
1179 obj = radeon_fb->obj;
1180 rbo = gem_to_radeon_bo(obj);
1181 r = radeon_bo_reserve(rbo, false);
1182 if (unlikely(r != 0))
1183 return r;
1184
1185 if (atomic)
1186 fb_location = radeon_bo_gpu_offset(rbo);
1187 else {
1188 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1189 if (unlikely(r != 0)) {
1190 radeon_bo_unreserve(rbo);
1191 return -EINVAL;
1192 }
1193 }
1194
1195 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1196 radeon_bo_unreserve(rbo);
1197
1198 switch (target_fb->format->format) {
1199 case DRM_FORMAT_C8:
1200 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1201 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1202 break;
1203 case DRM_FORMAT_XRGB4444:
1204 case DRM_FORMAT_ARGB4444:
1205 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1206 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
1207#ifdef __BIG_ENDIAN
1208 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1209#endif
1210 break;
1211 case DRM_FORMAT_XRGB1555:
1212 case DRM_FORMAT_ARGB1555:
1213 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1214 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1215#ifdef __BIG_ENDIAN
1216 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1217#endif
1218 break;
1219 case DRM_FORMAT_BGRX5551:
1220 case DRM_FORMAT_BGRA5551:
1221 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1222 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
1223#ifdef __BIG_ENDIAN
1224 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1225#endif
1226 break;
1227 case DRM_FORMAT_RGB565:
1228 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1229 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1230#ifdef __BIG_ENDIAN
1231 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1232#endif
1233 break;
1234 case DRM_FORMAT_XRGB8888:
1235 case DRM_FORMAT_ARGB8888:
1236 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1237 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1238#ifdef __BIG_ENDIAN
1239 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1240#endif
1241 break;
1242 case DRM_FORMAT_XRGB2101010:
1243 case DRM_FORMAT_ARGB2101010:
1244 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1245 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
1246#ifdef __BIG_ENDIAN
1247 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1248#endif
1249
1250 bypass_lut = true;
1251 break;
1252 case DRM_FORMAT_BGRX1010102:
1253 case DRM_FORMAT_BGRA1010102:
1254 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1255 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
1256#ifdef __BIG_ENDIAN
1257 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1258#endif
1259
1260 bypass_lut = true;
1261 break;
1262 default:
1263 DRM_ERROR("Unsupported screen format %s\n",
1264 drm_get_format_name(target_fb->format->format, &format_name));
1265 return -EINVAL;
1266 }
1267
1268 if (tiling_flags & RADEON_TILING_MACRO) {
1269 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1270
1271
1272 if (rdev->family >= CHIP_TAHITI) {
1273 unsigned index, num_banks;
1274
1275 if (rdev->family >= CHIP_BONAIRE) {
1276 unsigned tileb, tile_split_bytes;
1277
1278
1279 tile_split_bytes = 64 << tile_split;
1280 tileb = 8 * 8 * target_fb->format->cpp[0];
1281 tileb = min(tile_split_bytes, tileb);
1282
1283 for (index = 0; tileb > 64; index++)
1284 tileb >>= 1;
1285
1286 if (index >= 16) {
1287 DRM_ERROR("Wrong screen bpp (%u) or tile split (%u)\n",
1288 target_fb->format->cpp[0] * 8,
1289 tile_split);
1290 return -EINVAL;
1291 }
1292
1293 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3;
1294 } else {
1295 switch (target_fb->format->cpp[0] * 8) {
1296 case 8:
1297 index = 10;
1298 break;
1299 case 16:
1300 index = SI_TILE_MODE_COLOR_2D_SCANOUT_16BPP;
1301 break;
1302 default:
1303 case 32:
1304 index = SI_TILE_MODE_COLOR_2D_SCANOUT_32BPP;
1305 break;
1306 }
1307
1308 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3;
1309 }
1310
1311 fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
1312 } else {
1313
1314 if (rdev->family >= CHIP_CAYMAN)
1315 tmp = rdev->config.cayman.tile_config;
1316 else
1317 tmp = rdev->config.evergreen.tile_config;
1318
1319 switch ((tmp & 0xf0) >> 4) {
1320 case 0:
1321 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1322 break;
1323 case 1:
1324 default:
1325 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1326 break;
1327 case 2:
1328 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1329 break;
1330 }
1331 }
1332
1333 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1334 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1335 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1336 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1337 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1338 if (rdev->family >= CHIP_BONAIRE) {
1339
1340 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING);
1341 }
1342 } else if (tiling_flags & RADEON_TILING_MICRO)
1343 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1344
1345 if (rdev->family >= CHIP_BONAIRE) {
1346
1347
1348
1349 u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f;
1350
1351 fb_format |= CIK_GRPH_PIPE_CONFIG(pipe_config);
1352 } else if ((rdev->family == CHIP_TAHITI) ||
1353 (rdev->family == CHIP_PITCAIRN))
1354 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1355 else if ((rdev->family == CHIP_VERDE) ||
1356 (rdev->family == CHIP_OLAND) ||
1357 (rdev->family == CHIP_HAINAN))
1358 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1359
1360 switch (radeon_crtc->crtc_id) {
1361 case 0:
1362 WREG32(AVIVO_D1VGA_CONTROL, 0);
1363 break;
1364 case 1:
1365 WREG32(AVIVO_D2VGA_CONTROL, 0);
1366 break;
1367 case 2:
1368 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1369 break;
1370 case 3:
1371 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1372 break;
1373 case 4:
1374 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1375 break;
1376 case 5:
1377 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1378 break;
1379 default:
1380 break;
1381 }
1382
1383
1384
1385
1386 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1387
1388 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1389 upper_32_bits(fb_location));
1390 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1391 upper_32_bits(fb_location));
1392 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1393 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1394 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1395 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1396 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1397 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1398
1399
1400
1401
1402
1403
1404 WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + radeon_crtc->crtc_offset,
1405 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
1406 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
1407
1408 if (bypass_lut)
1409 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1410
1411 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1412 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1413 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1414 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1415 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1416 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1417
1418 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1419 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1420 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1421
1422 if (rdev->family >= CHIP_BONAIRE)
1423 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1424 target_fb->height);
1425 else
1426 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1427 target_fb->height);
1428 x &= ~3;
1429 y &= ~1;
1430 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1431 (x << 16) | y);
1432 viewport_w = crtc->mode.hdisplay;
1433 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1434 if ((rdev->family >= CHIP_BONAIRE) &&
1435 (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE))
1436 viewport_h *= 2;
1437 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1438 (viewport_w << 16) | viewport_h);
1439
1440
1441 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1442
1443 if (!atomic && fb && fb != crtc->primary->fb) {
1444 radeon_fb = to_radeon_framebuffer(fb);
1445 rbo = gem_to_radeon_bo(radeon_fb->obj);
1446 r = radeon_bo_reserve(rbo, false);
1447 if (unlikely(r != 0))
1448 return r;
1449 radeon_bo_unpin(rbo);
1450 radeon_bo_unreserve(rbo);
1451 }
1452
1453
1454 radeon_bandwidth_update(rdev);
1455
1456 return 0;
1457}
1458
1459static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1460 struct drm_framebuffer *fb,
1461 int x, int y, int atomic)
1462{
1463 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1464 struct drm_device *dev = crtc->dev;
1465 struct radeon_device *rdev = dev->dev_private;
1466 struct radeon_framebuffer *radeon_fb;
1467 struct drm_gem_object *obj;
1468 struct radeon_bo *rbo;
1469 struct drm_framebuffer *target_fb;
1470 uint64_t fb_location;
1471 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1472 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1473 u32 viewport_w, viewport_h;
1474 int r;
1475 bool bypass_lut = false;
1476 struct drm_format_name_buf format_name;
1477
1478
1479 if (!atomic && !crtc->primary->fb) {
1480 DRM_DEBUG_KMS("No FB bound\n");
1481 return 0;
1482 }
1483
1484 if (atomic) {
1485 radeon_fb = to_radeon_framebuffer(fb);
1486 target_fb = fb;
1487 }
1488 else {
1489 radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
1490 target_fb = crtc->primary->fb;
1491 }
1492
1493 obj = radeon_fb->obj;
1494 rbo = gem_to_radeon_bo(obj);
1495 r = radeon_bo_reserve(rbo, false);
1496 if (unlikely(r != 0))
1497 return r;
1498
1499
1500
1501
1502 if (atomic)
1503 fb_location = radeon_bo_gpu_offset(rbo);
1504 else {
1505 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1506 if (unlikely(r != 0)) {
1507 radeon_bo_unreserve(rbo);
1508 return -EINVAL;
1509 }
1510 }
1511 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1512 radeon_bo_unreserve(rbo);
1513
1514 switch (target_fb->format->format) {
1515 case DRM_FORMAT_C8:
1516 fb_format =
1517 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1518 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1519 break;
1520 case DRM_FORMAT_XRGB4444:
1521 case DRM_FORMAT_ARGB4444:
1522 fb_format =
1523 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1524 AVIVO_D1GRPH_CONTROL_16BPP_ARGB4444;
1525#ifdef __BIG_ENDIAN
1526 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1527#endif
1528 break;
1529 case DRM_FORMAT_XRGB1555:
1530 fb_format =
1531 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1532 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1533#ifdef __BIG_ENDIAN
1534 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1535#endif
1536 break;
1537 case DRM_FORMAT_RGB565:
1538 fb_format =
1539 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1540 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1541#ifdef __BIG_ENDIAN
1542 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1543#endif
1544 break;
1545 case DRM_FORMAT_XRGB8888:
1546 case DRM_FORMAT_ARGB8888:
1547 fb_format =
1548 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1549 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1550#ifdef __BIG_ENDIAN
1551 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1552#endif
1553 break;
1554 case DRM_FORMAT_XRGB2101010:
1555 case DRM_FORMAT_ARGB2101010:
1556 fb_format =
1557 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1558 AVIVO_D1GRPH_CONTROL_32BPP_ARGB2101010;
1559#ifdef __BIG_ENDIAN
1560 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1561#endif
1562
1563 bypass_lut = true;
1564 break;
1565 default:
1566 DRM_ERROR("Unsupported screen format %s\n",
1567 drm_get_format_name(target_fb->format->format, &format_name));
1568 return -EINVAL;
1569 }
1570
1571 if (rdev->family >= CHIP_R600) {
1572 if (tiling_flags & RADEON_TILING_MACRO)
1573 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1574 else if (tiling_flags & RADEON_TILING_MICRO)
1575 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1576 } else {
1577 if (tiling_flags & RADEON_TILING_MACRO)
1578 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1579
1580 if (tiling_flags & RADEON_TILING_MICRO)
1581 fb_format |= AVIVO_D1GRPH_TILED;
1582 }
1583
1584 if (radeon_crtc->crtc_id == 0)
1585 WREG32(AVIVO_D1VGA_CONTROL, 0);
1586 else
1587 WREG32(AVIVO_D2VGA_CONTROL, 0);
1588
1589
1590
1591
1592 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1593
1594 if (rdev->family >= CHIP_RV770) {
1595 if (radeon_crtc->crtc_id) {
1596 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1597 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1598 } else {
1599 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1600 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1601 }
1602 }
1603 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1604 (u32) fb_location);
1605 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1606 radeon_crtc->crtc_offset, (u32) fb_location);
1607 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1608 if (rdev->family >= CHIP_R600)
1609 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1610
1611
1612 WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset,
1613 (bypass_lut ? AVIVO_LUT_10BIT_BYPASS_EN : 0), ~AVIVO_LUT_10BIT_BYPASS_EN);
1614
1615 if (bypass_lut)
1616 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1617
1618 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1619 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1620 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1621 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1622 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1623 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1624
1625 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1626 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1627 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1628
1629 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1630 target_fb->height);
1631 x &= ~3;
1632 y &= ~1;
1633 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1634 (x << 16) | y);
1635 viewport_w = crtc->mode.hdisplay;
1636 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1637 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1638 (viewport_w << 16) | viewport_h);
1639
1640
1641 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 3);
1642
1643 if (!atomic && fb && fb != crtc->primary->fb) {
1644 radeon_fb = to_radeon_framebuffer(fb);
1645 rbo = gem_to_radeon_bo(radeon_fb->obj);
1646 r = radeon_bo_reserve(rbo, false);
1647 if (unlikely(r != 0))
1648 return r;
1649 radeon_bo_unpin(rbo);
1650 radeon_bo_unreserve(rbo);
1651 }
1652
1653
1654 radeon_bandwidth_update(rdev);
1655
1656 return 0;
1657}
1658
1659int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1660 struct drm_framebuffer *old_fb)
1661{
1662 struct drm_device *dev = crtc->dev;
1663 struct radeon_device *rdev = dev->dev_private;
1664
1665 if (ASIC_IS_DCE4(rdev))
1666 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1667 else if (ASIC_IS_AVIVO(rdev))
1668 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1669 else
1670 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1671}
1672
1673int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1674 struct drm_framebuffer *fb,
1675 int x, int y, enum mode_set_atomic state)
1676{
1677 struct drm_device *dev = crtc->dev;
1678 struct radeon_device *rdev = dev->dev_private;
1679
1680 if (ASIC_IS_DCE4(rdev))
1681 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1682 else if (ASIC_IS_AVIVO(rdev))
1683 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1684 else
1685 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1686}
1687
1688
1689static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1690{
1691 struct drm_device *dev = crtc->dev;
1692 struct radeon_device *rdev = dev->dev_private;
1693 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1694 u32 disp_merge_cntl;
1695
1696 switch (radeon_crtc->crtc_id) {
1697 case 0:
1698 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1699 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1700 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1701 break;
1702 case 1:
1703 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1704 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1705 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1706 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1707 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1708 break;
1709 }
1710}
1711
1712
1713
1714
1715
1716
1717
1718
1719static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1720{
1721 struct drm_device *dev = crtc->dev;
1722 struct drm_crtc *test_crtc;
1723 struct radeon_crtc *test_radeon_crtc;
1724 u32 pll_in_use = 0;
1725
1726 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1727 if (crtc == test_crtc)
1728 continue;
1729
1730 test_radeon_crtc = to_radeon_crtc(test_crtc);
1731 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1732 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1733 }
1734 return pll_in_use;
1735}
1736
1737
1738
1739
1740
1741
1742
1743
1744
1745
1746static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1747{
1748 struct drm_device *dev = crtc->dev;
1749 struct radeon_device *rdev = dev->dev_private;
1750 struct drm_crtc *test_crtc;
1751 struct radeon_crtc *test_radeon_crtc;
1752
1753 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1754 if (crtc == test_crtc)
1755 continue;
1756 test_radeon_crtc = to_radeon_crtc(test_crtc);
1757 if (test_radeon_crtc->encoder &&
1758 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1759
1760 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1761 test_radeon_crtc->pll_id == ATOM_PPLL2)
1762 continue;
1763
1764 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1765 return test_radeon_crtc->pll_id;
1766 }
1767 }
1768 return ATOM_PPLL_INVALID;
1769}
1770
1771
1772
1773
1774
1775
1776
1777
1778
1779
static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_crtc *test_crtc;
	struct radeon_crtc *test_radeon_crtc;
	u32 adjusted_clock, test_adjusted_clock;

	adjusted_clock = radeon_crtc->adjusted_clock;

	if (adjusted_clock == 0)
		return ATOM_PPLL_INVALID;

	list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
		if (crtc == test_crtc)
			continue;
		test_radeon_crtc = to_radeon_crtc(test_crtc);
		if (test_radeon_crtc->encoder &&
		    !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
			/* PPLL2 is dedicated to UNIPHY A on DCE6.1, don't share it */
			if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
			    test_radeon_crtc->pll_id == ATOM_PPLL2)
				continue;
			/* another crtc already drives this connector, reuse its PPLL */
			if (test_radeon_crtc->connector == radeon_crtc->connector) {
				if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
					return test_radeon_crtc->pll_id;
			}
			/* otherwise share only if the clocks and ss state match */
			test_adjusted_clock = test_radeon_crtc->adjusted_clock;
			if ((crtc->mode.clock == test_crtc->mode.clock) &&
			    (adjusted_clock == test_adjusted_clock) &&
			    (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
			    (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
				return test_radeon_crtc->pll_id;
		}
	}
	return ATOM_PPLL_INVALID;
}

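/*
 * radeon_atom_pick_pll - allocate a PPLL for use by the crtc
 *
 * Returns the PPLL (Pixel PLL) to be used by the crtc.  For DP monitors a
 * single PPLL can be shared by all DP crtcs/encoders; if an external DP
 * reference clock is available, ATOM_PPLL_INVALID is returned so that the
 * pixel clock code skips PLL programming entirely.  Non-DP monitors share
 * a PPLL only when their clocks match, otherwise a free PPLL is picked
 * from the mask of PPLLs not already in use.  On pre-DCE4 asics the crtc
 * to PLL mapping is fixed, so the crtc id is returned directly.
 */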
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	u32 pll_in_use;
	int pll;

	if (ASIC_IS_DCE8(rdev)) {
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* otherwise pick a free PPLL */
		if ((rdev->family == CHIP_KABINI) ||
		    (rdev->family == CHIP_MULLINS)) {
			/* KB/ML only have PPLL1 and PPLL2 */
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		} else {
			/* PPLL0, PPLL1 and PPLL2 are all usable */
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			if (!(pll_in_use & (1 << ATOM_PPLL0)))
				return ATOM_PPLL0;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		}
	} else if (ASIC_IS_DCE61(rdev)) {
		struct radeon_encoder_atom_dig *dig =
			radeon_encoder->enc_priv;

		if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
		    (dig->linkb == false))
			/* UNIPHY A uses PPLL2 */
			return ATOM_PPLL2;
		else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* remaining encoders can use PPLL0 or PPLL1 */
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL0)))
			return ATOM_PPLL0;
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE41(rdev)) {
		/* don't share PLLs on DCE4.1 chips */
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
		}
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE4(rdev)) {
		/*
		 * In DP mode the DP reference clock can come from a PPLL,
		 * the DCPLL (DCE5), PPLL0 (DCE6) or an external clock.
		 * Returning ATOM_PPLL_INVALID tells the pixel clock code to
		 * skip PLL programming because the external clock is used.
		 */
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else if (ASIC_IS_DCE6(rdev))
				/* use PPLL0 for all DP */
				return ATOM_PPLL0;
			else if (ASIC_IS_DCE5(rdev))
				/* use DCPLL for all DP */
				return ATOM_DCPLL;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* all other cases: pick a free PPLL */
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else {
		/*
		 * On pre-DCE4 asics the crtc to PLL mapping is hardcoded:
		 * crtc 0 uses PPLL1 and crtc 1 uses PPLL2, so just return
		 * the crtc id.
		 */
		return radeon_crtc->crtc_id;
	}
}

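/*
 * radeon_atom_disp_eng_pll_init - program the display engine clock at init
 *
 * Sets the display engine PLL to the default display clock from the
 * firmware.  On DCE4/5 asics, DCPLL spread spectrum is disabled around the
 * change and re-enabled afterwards when the firmware provides SS settings.
 */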
void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
{
	/* always set the display engine clock */
	if (ASIC_IS_DCE6(rdev))
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
	else if (ASIC_IS_DCE4(rdev)) {
		struct radeon_atom_ss ss;
		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
								   ASIC_INTERNAL_SS_ON_DCPLL,
								   rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
		/* change the dispclk with spread spectrum temporarily disabled */
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
	}
}

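/*
 * atombios_crtc_mode_set - drm crtc helper .mode_set callback
 *
 * Programs the pixel PLL, crtc timing, scanout base, overscan and scaler
 * for the new mode and keeps a copy of the programmed mode in hw_mode.
 */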
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	bool is_tvcv = false;

	if (radeon_encoder->active_device &
	    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
		is_tvcv = true;

	if (!radeon_crtc->adjusted_clock)
		return -EINVAL;

	atombios_crtc_set_pll(crtc, adjusted_mode);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	radeon_cursor_reset(crtc);
	/* keep a copy of the programmed mode for power management */
	radeon_crtc->hw_mode = *adjusted_mode;

	return 0;
}

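/*
 * atombios_crtc_mode_fixup - drm crtc helper .mode_fixup callback
 *
 * Latches the encoder/connector driving this crtc, validates the scaling
 * setup, prepares the PLL parameters and picks a PPLL for the crtc.
 */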
static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     const struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

	/* assign the encoder to the radeon crtc to avoid repeated lookups later */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_crtc->encoder = encoder;
			radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
			break;
		}
	}
	if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
		radeon_crtc->encoder = NULL;
		radeon_crtc->connector = NULL;
		return false;
	}
	if (radeon_crtc->encoder) {
		struct radeon_encoder *radeon_encoder =
			to_radeon_encoder(radeon_crtc->encoder);

		radeon_crtc->output_csc = radeon_encoder->output_csc;
	}
	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
		return false;
	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
	/* if we can't get a PPLL for a non-DP encoder, fail */
	if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
	    !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
		return false;

	return true;
}

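/*
 * atombios_crtc_prepare - drm crtc helper .prepare callback
 *
 * Un-gates the crtc power domain (DCE6+), locks the crtc registers and
 * turns the crtc off before the mode is reprogrammed.
 */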
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* disable crtc pair power gating before programming */
	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_DISABLE);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}

static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}

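/*
 * atombios_crtc_disable - drm crtc helper .disable callback
 *
 * Turns the crtc off, unpins the scanout buffer, power gates the crtc
 * (DCE6+) and tears down its PPLL unless another active crtc still uses it.
 */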
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_atom_ss ss;
	int i;

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct radeon_framebuffer *radeon_fb;
		struct radeon_bo *rbo;

		radeon_fb = to_radeon_framebuffer(crtc->primary->fb);
		rbo = gem_to_radeon_bo(radeon_fb->obj);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve rbo before unpin\n");
		else {
			radeon_bo_unpin(rbo);
			radeon_bo_unreserve(rbo);
		}
	}
	/* disable the graphics surface */
	if (ASIC_IS_DCE4(rdev))
		WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
	else if (ASIC_IS_AVIVO(rdev))
		WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0);

	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_ENABLE);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i] &&
		    rdev->mode_info.crtcs[i]->enabled &&
		    i != radeon_crtc->crtc_id &&
		    radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
			/* another active crtc is still using this PPLL,
			 * don't turn it off
			 */
			goto done;
		}
	}

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		/* disable the ppll */
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	case ATOM_PPLL0:
		/* disable the ppll */
		if ((rdev->family == CHIP_ARUBA) ||
		    (rdev->family == CHIP_KAVERI) ||
		    (rdev->family == CHIP_BONAIRE) ||
		    (rdev->family == CHIP_HAWAII))
			atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
						  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	default:
		break;
	}
done:
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
}

static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.mode_set_base_atomic = atombios_crtc_set_base_atomic,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.disable = atombios_crtc_disable,
};

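/*
 * radeon_atombios_init_crtc - set up an atombios-driven crtc
 *
 * Records the per-crtc register offset, resets the PLL/encoder/connector
 * state and attaches the atombios crtc helper functions.
 */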
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}