#include <drm/drm_crtc_helper.h>
#include <drm/drm_fb_helper.h>
#include <drm/drm_fixed.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_vblank.h>
#include <drm/radeon_drm.h>

#include "radeon.h"
#include "atom.h"
#include "atom-bits.h"
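
/* Program the SetCRTC_OverScan atom table: depending on the CRTC's RMX
 * (scaler) type the source mode is centered, aspect-corrected or stretched
 * (with optional borders) into the adjusted mode.
 */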
static void atombios_overscan_setup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
	int a1, a2;

	memset(&args, 0, sizeof(args));

	args.ucCRTC = radeon_crtc->crtc_id;

	switch (radeon_crtc->rmx_type) {
	case RMX_CENTER:
		args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
		args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
		args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
		args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
		break;
	case RMX_ASPECT:
		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;

		if (a1 > a2) {
			args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
			args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
		} else if (a2 > a1) {
			args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
			args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
		}
		break;
	case RMX_FULL:
	default:
		args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
		args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
		args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
		args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
		break;
	}
	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
}
82
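/* Program the EnableScaler atom table: pick the TV standard for TV/CV
 * outputs, otherwise map the CRTC's RMX type to an atom scaler mode.
 * RV515-R580 additionally need atom_rv515_force_tv_scaler() for TV/CV.
 */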
83static void atombios_scaler_setup(struct drm_crtc *crtc)
84{
85 struct drm_device *dev = crtc->dev;
86 struct radeon_device *rdev = dev->dev_private;
87 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
88 ENABLE_SCALER_PS_ALLOCATION args;
89 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
90 struct radeon_encoder *radeon_encoder =
91 to_radeon_encoder(radeon_crtc->encoder);
92
93 enum radeon_tv_std tv_std = TV_STD_NTSC;
94 bool is_tv = false, is_cv = false;
95
96 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
97 return;
98
99 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
100 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
101 tv_std = tv_dac->tv_std;
102 is_tv = true;
103 }
104
105 memset(&args, 0, sizeof(args));
106
107 args.ucScaler = radeon_crtc->crtc_id;
108
109 if (is_tv) {
110 switch (tv_std) {
111 case TV_STD_NTSC:
112 default:
113 args.ucTVStandard = ATOM_TV_NTSC;
114 break;
115 case TV_STD_PAL:
116 args.ucTVStandard = ATOM_TV_PAL;
117 break;
118 case TV_STD_PAL_M:
119 args.ucTVStandard = ATOM_TV_PALM;
120 break;
121 case TV_STD_PAL_60:
122 args.ucTVStandard = ATOM_TV_PAL60;
123 break;
124 case TV_STD_NTSC_J:
125 args.ucTVStandard = ATOM_TV_NTSCJ;
126 break;
127 case TV_STD_SCART_PAL:
128 args.ucTVStandard = ATOM_TV_PAL;
129 break;
130 case TV_STD_SECAM:
131 args.ucTVStandard = ATOM_TV_SECAM;
132 break;
133 case TV_STD_PAL_CN:
134 args.ucTVStandard = ATOM_TV_PALCN;
135 break;
136 }
137 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
138 } else if (is_cv) {
139 args.ucTVStandard = ATOM_TV_CV;
140 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
141 } else {
142 switch (radeon_crtc->rmx_type) {
143 case RMX_FULL:
144 args.ucEnable = ATOM_SCALER_EXPANSION;
145 break;
146 case RMX_CENTER:
147 args.ucEnable = ATOM_SCALER_CENTER;
148 break;
149 case RMX_ASPECT:
150 args.ucEnable = ATOM_SCALER_EXPANSION;
151 break;
152 default:
153 if (ASIC_IS_AVIVO(rdev))
154 args.ucEnable = ATOM_SCALER_DISABLE;
155 else
156 args.ucEnable = ATOM_SCALER_CENTER;
157 break;
158 }
159 }
160 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
161 if ((is_tv || is_cv)
162 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
163 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
164 }
165}
166
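/* Lock or unlock the CRTC's double buffered registers via the
 * UpdateCRTC_DoubleBufferRegisters table so a modeset can update them
 * atomically.
 */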
167static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
168{
169 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
170 struct drm_device *dev = crtc->dev;
171 struct radeon_device *rdev = dev->dev_private;
172 int index =
173 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
174 ENABLE_CRTC_PS_ALLOCATION args;
175
176 memset(&args, 0, sizeof(args));
177
178 args.ucCRTC = radeon_crtc->crtc_id;
179 args.ucEnable = lock;
180
181 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
182}
183
184static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
185{
186 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
187 struct drm_device *dev = crtc->dev;
188 struct radeon_device *rdev = dev->dev_private;
189 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
190 ENABLE_CRTC_PS_ALLOCATION args;
191
192 memset(&args, 0, sizeof(args));
193
194 args.ucCRTC = radeon_crtc->crtc_id;
195 args.ucEnable = state;
196
197 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
198}
199
200static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
201{
202 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
203 struct drm_device *dev = crtc->dev;
204 struct radeon_device *rdev = dev->dev_private;
205 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
206 ENABLE_CRTC_PS_ALLOCATION args;
207
208 memset(&args, 0, sizeof(args));
209
210 args.ucCRTC = radeon_crtc->crtc_id;
211 args.ucEnable = state;
212
213 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
214}
215
216static const u32 vga_control_regs[6] =
217{
218 AVIVO_D1VGA_CONTROL,
219 AVIVO_D2VGA_CONTROL,
220 EVERGREEN_D3VGA_CONTROL,
221 EVERGREEN_D4VGA_CONTROL,
222 EVERGREEN_D5VGA_CONTROL,
223 EVERGREEN_D6VGA_CONTROL,
224};
225
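/* Blank or unblank the CRTC via the BlankCRTC table.  On DCE8 the VGA
 * control register is temporarily forced on around the call (presumably a
 * hw workaround) and then restored to its original value.
 */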
226static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
227{
228 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
229 struct drm_device *dev = crtc->dev;
230 struct radeon_device *rdev = dev->dev_private;
231 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
232 BLANK_CRTC_PS_ALLOCATION args;
233 u32 vga_control = 0;
234
235 memset(&args, 0, sizeof(args));
236
237 if (ASIC_IS_DCE8(rdev)) {
238 vga_control = RREG32(vga_control_regs[radeon_crtc->crtc_id]);
239 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control | 1);
240 }
241
242 args.ucCRTC = radeon_crtc->crtc_id;
243 args.ucBlanking = state;
244
245 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
246
247 if (ASIC_IS_DCE8(rdev)) {
248 WREG32(vga_control_regs[radeon_crtc->crtc_id], vga_control);
249 }
250}
251
252static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
253{
254 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
255 struct drm_device *dev = crtc->dev;
256 struct radeon_device *rdev = dev->dev_private;
257 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
258 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
259
260 memset(&args, 0, sizeof(args));
261
262 args.ucDispPipeId = radeon_crtc->crtc_id;
263 args.ucEnable = state;
264
265 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
266}
267
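/* DPMS: on = enable the CRTC (plus memory requests on DCE3..DCE5),
 * unblank, turn vblank handling on and reload the LUT; off = the same
 * steps in reverse order.  Power management clocks are recomputed in
 * either case.
 */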
268void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
269{
270 struct drm_device *dev = crtc->dev;
271 struct radeon_device *rdev = dev->dev_private;
272 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
273
274 switch (mode) {
275 case DRM_MODE_DPMS_ON:
276 radeon_crtc->enabled = true;
277 atombios_enable_crtc(crtc, ATOM_ENABLE);
278 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
279 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
280 atombios_blank_crtc(crtc, ATOM_DISABLE);
281 if (dev->num_crtcs > radeon_crtc->crtc_id)
282 drm_crtc_vblank_on(crtc);
283 radeon_crtc_load_lut(crtc);
284 break;
285 case DRM_MODE_DPMS_STANDBY:
286 case DRM_MODE_DPMS_SUSPEND:
287 case DRM_MODE_DPMS_OFF:
288 if (dev->num_crtcs > radeon_crtc->crtc_id)
289 drm_crtc_vblank_off(crtc);
290 if (radeon_crtc->enabled)
291 atombios_blank_crtc(crtc, ATOM_ENABLE);
292 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
293 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
294 atombios_enable_crtc(crtc, ATOM_DISABLE);
295 radeon_crtc->enabled = false;
296 break;
297 }
298
299 radeon_pm_compute_clocks(rdev);
300}
301
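/* Program the CRTC timing via the SetCRTC_UsingDTDTiming table; sizes and
 * blanking periods are expressed relative to the bordered (overscanned)
 * active area, and the sync polarity/interlace flags go into MiscInfo.
 */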
302static void
303atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
304 struct drm_display_mode *mode)
305{
306 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
307 struct drm_device *dev = crtc->dev;
308 struct radeon_device *rdev = dev->dev_private;
309 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
310 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
311 u16 misc = 0;
312
313 memset(&args, 0, sizeof(args));
314 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
315 args.usH_Blanking_Time =
316 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
317 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
318 args.usV_Blanking_Time =
319 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
320 args.usH_SyncOffset =
321 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
322 args.usH_SyncWidth =
323 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
324 args.usV_SyncOffset =
325 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
326 args.usV_SyncWidth =
327 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
328 args.ucH_Border = radeon_crtc->h_border;
329 args.ucV_Border = radeon_crtc->v_border;
330
331 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
332 misc |= ATOM_VSYNC_POLARITY;
333 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
334 misc |= ATOM_HSYNC_POLARITY;
335 if (mode->flags & DRM_MODE_FLAG_CSYNC)
336 misc |= ATOM_COMPOSITESYNC;
337 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
338 misc |= ATOM_INTERLACE;
339 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
340 misc |= ATOM_DOUBLE_CLOCK_MODE;
341 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
342 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
343
344 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
345 args.ucCRTC = radeon_crtc->crtc_id;
346
347 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
348}
349
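/* Program the CRTC timing via the SetCRTC_Timing table using the full
 * htotal/vtotal based values plus the overscan border counts.
 */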
350static void atombios_crtc_set_timing(struct drm_crtc *crtc,
351 struct drm_display_mode *mode)
352{
353 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
354 struct drm_device *dev = crtc->dev;
355 struct radeon_device *rdev = dev->dev_private;
356 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
357 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
358 u16 misc = 0;
359
360 memset(&args, 0, sizeof(args));
361 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
362 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
363 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
364 args.usH_SyncWidth =
365 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
366 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
367 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
368 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
369 args.usV_SyncWidth =
370 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
371
372 args.ucOverscanRight = radeon_crtc->h_border;
373 args.ucOverscanLeft = radeon_crtc->h_border;
374 args.ucOverscanBottom = radeon_crtc->v_border;
375 args.ucOverscanTop = radeon_crtc->v_border;
376
377 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
378 misc |= ATOM_VSYNC_POLARITY;
379 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
380 misc |= ATOM_HSYNC_POLARITY;
381 if (mode->flags & DRM_MODE_FLAG_CSYNC)
382 misc |= ATOM_COMPOSITESYNC;
383 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
384 misc |= ATOM_INTERLACE;
385 if (mode->flags & DRM_MODE_FLAG_DBLCLK)
386 misc |= ATOM_DOUBLE_CLOCK_MODE;
387 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
388 misc |= ATOM_H_REPLICATIONBY2 | ATOM_V_REPLICATIONBY2;
389
390 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
391 args.ucCRTC = radeon_crtc->crtc_id;
392
393 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
394}
395
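/* Clear the spread spectrum enable bit directly in the per-PLL SS control
 * register (DCE4 vs. older AVIVO register layout).
 */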
396static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
397{
398 u32 ss_cntl;
399
400 if (ASIC_IS_DCE4(rdev)) {
401 switch (pll_id) {
402 case ATOM_PPLL1:
403 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
404 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
405 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
406 break;
407 case ATOM_PPLL2:
408 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
409 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
410 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
411 break;
412 case ATOM_DCPLL:
413 case ATOM_PPLL_INVALID:
414 return;
415 }
416 } else if (ASIC_IS_AVIVO(rdev)) {
417 switch (pll_id) {
418 case ATOM_PPLL1:
419 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
420 ss_cntl &= ~1;
421 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
422 break;
423 case ATOM_PPLL2:
424 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
425 ss_cntl &= ~1;
426 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
427 break;
428 case ATOM_DCPLL:
429 case ATOM_PPLL_INVALID:
430 return;
431 }
432 }
433}
434
435
436union atom_enable_ss {
437 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
438 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
439 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
440 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
441 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
442};
443
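/* Enable or disable spread spectrum on a PLL through the
 * EnableSpreadSpectrumOnPPLL table.  When disabling, SS is left alone if
 * another active crtc still uses the same PLL; the parameter layout
 * depends on the DCE level.
 */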
444static void atombios_crtc_program_ss(struct radeon_device *rdev,
445 int enable,
446 int pll_id,
447 int crtc_id,
448 struct radeon_atom_ss *ss)
449{
450 unsigned i;
451 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
452 union atom_enable_ss args;
453
	if (enable) {
		/* Don't mess with SS if the percentage is 0 or the SS source
		 * is external; in both cases the internal SS hardware must be
		 * left alone.
		 */
		if (ss->percentage == 0)
			return;
		if (ss->type & ATOM_EXTERNAL_SS_MASK)
			return;
	} else {
		for (i = 0; i < rdev->num_crtc; i++) {
			if (rdev->mode_info.crtcs[i] &&
			    rdev->mode_info.crtcs[i]->enabled &&
			    i != crtc_id &&
			    pll_id == rdev->mode_info.crtcs[i]->pll_id) {
				/* one other crtc is still using this pll;
				 * don't disable spread spectrum as it might
				 * still be in use by that display.
				 */
				return;
			}
		}
	}
478
479 memset(&args, 0, sizeof(args));
480
481 if (ASIC_IS_DCE5(rdev)) {
482 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
483 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
484 switch (pll_id) {
485 case ATOM_PPLL1:
486 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
487 break;
488 case ATOM_PPLL2:
489 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
490 break;
491 case ATOM_DCPLL:
492 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
493 break;
494 case ATOM_PPLL_INVALID:
495 return;
496 }
497 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
498 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
499 args.v3.ucEnable = enable;
500 } else if (ASIC_IS_DCE4(rdev)) {
501 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
502 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
503 switch (pll_id) {
504 case ATOM_PPLL1:
505 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
506 break;
507 case ATOM_PPLL2:
508 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
509 break;
510 case ATOM_DCPLL:
511 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
512 break;
513 case ATOM_PPLL_INVALID:
514 return;
515 }
516 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
517 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
518 args.v2.ucEnable = enable;
519 } else if (ASIC_IS_DCE3(rdev)) {
520 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
521 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
522 args.v1.ucSpreadSpectrumStep = ss->step;
523 args.v1.ucSpreadSpectrumDelay = ss->delay;
524 args.v1.ucSpreadSpectrumRange = ss->range;
525 args.v1.ucPpll = pll_id;
526 args.v1.ucEnable = enable;
527 } else if (ASIC_IS_AVIVO(rdev)) {
528 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
529 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
530 atombios_disable_ss(rdev, pll_id);
531 return;
532 }
533 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
534 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
535 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
536 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
537 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
538 args.lvds_ss_2.ucEnable = enable;
539 } else {
540 if (enable == ATOM_DISABLE) {
541 atombios_disable_ss(rdev, pll_id);
542 return;
543 }
544 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
545 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
546 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
547 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
548 args.lvds_ss.ucEnable = enable;
549 }
550 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
551}
552
553union adjust_pixel_clock {
554 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
555 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
556};
557
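/* Ask the BIOS (AdjustDisplayPll on DCE3+) what pixel clock the
 * encoder/transmitter actually needs for this mode and collect PLL hints
 * (flags, reference/post dividers) along the way.  Returns the adjusted
 * clock in kHz.
 */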
558static u32 atombios_adjust_pll(struct drm_crtc *crtc,
559 struct drm_display_mode *mode)
560{
561 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
562 struct drm_device *dev = crtc->dev;
563 struct radeon_device *rdev = dev->dev_private;
564 struct drm_encoder *encoder = radeon_crtc->encoder;
565 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
566 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
567 u32 adjusted_clock = mode->clock;
568 int encoder_mode = atombios_get_encoder_mode(encoder);
569 u32 dp_clock = mode->clock;
570 u32 clock = mode->clock;
571 int bpc = radeon_crtc->bpc;
572 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
573
574
575 radeon_crtc->pll_flags = 0;
576
577 if (ASIC_IS_AVIVO(rdev)) {
578 if ((rdev->family == CHIP_RS600) ||
579 (rdev->family == CHIP_RS690) ||
580 (rdev->family == CHIP_RS740))
581 radeon_crtc->pll_flags |= (
582 RADEON_PLL_PREFER_CLOSEST_LOWER);
583
584 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)
585 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
586 else
587 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
588
589 if (rdev->family < CHIP_RV770)
590 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
591
592 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
593 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
594
595 if (((rdev->family == CHIP_RS780) || (rdev->family == CHIP_RS880))
596 && !radeon_crtc->ss_enabled)
597 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
598 if (ASIC_IS_DCE32(rdev) && mode->clock > 165000)
599 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
600 } else {
601 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
602
603 if (mode->clock > 200000)
604 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
605 else
606 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
607 }
608
609 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
610 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
611 if (connector) {
612 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
613 struct radeon_connector_atom_dig *dig_connector =
614 radeon_connector->con_priv;
615
616 dp_clock = dig_connector->dp_clock;
617 }
618 }
619
620 if (radeon_encoder->is_mst_encoder) {
621 struct radeon_encoder_mst *mst_enc = radeon_encoder->enc_priv;
622 struct radeon_connector_atom_dig *dig_connector = mst_enc->connector->con_priv;
623
624 dp_clock = dig_connector->dp_clock;
625 }
626
	/* use the recommended ref_div when LCD spread spectrum is enabled */
	if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
629 if (radeon_crtc->ss_enabled) {
630 if (radeon_crtc->ss.refdiv) {
631 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
632 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
633 if (ASIC_IS_AVIVO(rdev) &&
634 rdev->family != CHIP_RS780 &&
635 rdev->family != CHIP_RS880)
636 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
637 }
638 }
639 }
640
	if (ASIC_IS_AVIVO(rdev)) {
		/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
		if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
			adjusted_clock = mode->clock * 2;
645 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
646 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
647 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
648 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
649 } else {
650 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
651 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
652 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
653 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
654 }

	/* scale the pixel clock for HDMI deep color modes */
	if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
658 switch (bpc) {
659 case 8:
660 default:
661 break;
662 case 10:
663 clock = (clock * 5) / 4;
664 break;
665 case 12:
666 clock = (clock * 3) / 2;
667 break;
668 case 16:
669 clock = clock * 2;
670 break;
671 }
672 }
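	/* example of the deep color scaling above: a 148.5 MHz 1080p timing
	 * at 12 bpc needs a 148.5 * 3/2 = 222.75 MHz TMDS clock
	 */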
673
	/* DCE3+ has its own AdjustDisplayPll command table which tweaks the
	 * requested pixel clock according to the encoder/transmitter to work
	 * around special hw requirements.
	 */
	if (ASIC_IS_DCE3(rdev)) {
679 union adjust_pixel_clock args;
680 u8 frev, crev;
681 int index;
682
683 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
684 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
685 &crev))
686 return adjusted_clock;
687
688 memset(&args, 0, sizeof(args));
689
690 switch (frev) {
691 case 1:
692 switch (crev) {
693 case 1:
694 case 2:
695 args.v1.usPixelClock = cpu_to_le16(clock / 10);
696 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
697 args.v1.ucEncodeMode = encoder_mode;
698 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
699 args.v1.ucConfig |=
700 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
701
702 atom_execute_table(rdev->mode_info.atom_context,
703 index, (uint32_t *)&args);
704 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
705 break;
706 case 3:
707 args.v3.sInput.usPixelClock = cpu_to_le16(clock / 10);
708 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
709 args.v3.sInput.ucEncodeMode = encoder_mode;
710 args.v3.sInput.ucDispPllConfig = 0;
711 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
712 args.v3.sInput.ucDispPllConfig |=
713 DISPPLL_CONFIG_SS_ENABLE;
714 if (ENCODER_MODE_IS_DP(encoder_mode)) {
715 args.v3.sInput.ucDispPllConfig |=
716 DISPPLL_CONFIG_COHERENT_MODE;
717
718 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
719 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
720 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
721 if (dig->coherent_mode)
722 args.v3.sInput.ucDispPllConfig |=
723 DISPPLL_CONFIG_COHERENT_MODE;
724 if (is_duallink)
725 args.v3.sInput.ucDispPllConfig |=
726 DISPPLL_CONFIG_DUAL_LINK;
727 }
728 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
729 ENCODER_OBJECT_ID_NONE)
730 args.v3.sInput.ucExtTransmitterID =
731 radeon_encoder_get_dp_bridge_encoder_id(encoder);
732 else
733 args.v3.sInput.ucExtTransmitterID = 0;
734
735 atom_execute_table(rdev->mode_info.atom_context,
736 index, (uint32_t *)&args);
737 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
738 if (args.v3.sOutput.ucRefDiv) {
739 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
740 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
741 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
742 }
743 if (args.v3.sOutput.ucPostDiv) {
744 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
745 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
746 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
747 }
748 break;
749 default:
750 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
751 return adjusted_clock;
752 }
753 break;
754 default:
755 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
756 return adjusted_clock;
757 }
758 }
759 return adjusted_clock;
760}
761
762union set_pixel_clock {
763 SET_PIXEL_CLOCK_PS_ALLOCATION base;
764 PIXEL_CLOCK_PARAMETERS v1;
765 PIXEL_CLOCK_PARAMETERS_V2 v2;
766 PIXEL_CLOCK_PARAMETERS_V3 v3;
767 PIXEL_CLOCK_PARAMETERS_V5 v5;
768 PIXEL_CLOCK_PARAMETERS_V6 v6;
769};
770
/* Set the display engine clock (DISPCLK) on DCE4+ asics via the
 * SetPixelClock table; the clock source is the DCPLL (or PPLL0 /
 * EXT_PLL1 on newer families).
 */
static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
					   u32 dispclk)
776{
777 u8 frev, crev;
778 int index;
779 union set_pixel_clock args;
780
781 memset(&args, 0, sizeof(args));
782
783 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
784 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
785 &crev))
786 return;
787
788 switch (frev) {
789 case 1:
790 switch (crev) {
		case 5:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
			args.v5.ucCRTC = ATOM_CRTC_INVALID;
			args.v5.usPixelClock = cpu_to_le16(dispclk);
			args.v5.ucPpll = ATOM_DCPLL;
			break;
		case 6:
			/* if the default dcpll clock is specified,
			 * SetPixelClock provides the dividers
			 */
			args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
			if (ASIC_IS_DCE61(rdev) || ASIC_IS_DCE8(rdev))
				args.v6.ucPpll = ATOM_EXT_PLL1;
			else if (ASIC_IS_DCE6(rdev))
				args.v6.ucPpll = ATOM_PPLL0;
			else
				args.v6.ucPpll = ATOM_DCPLL;
			break;
811 default:
812 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
813 return;
814 }
815 break;
816 default:
817 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
818 return;
819 }
820 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
821}
822
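/* Program a pixel PLL through the SetPixelClock table.  The argument
 * layout (v1/v2/v3/v5/v6) depends on the table revision reported by the
 * BIOS; newer revisions also carry the encoder mode, bpc and SS source.
 */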
823static void atombios_crtc_program_pll(struct drm_crtc *crtc,
824 u32 crtc_id,
825 int pll_id,
826 u32 encoder_mode,
827 u32 encoder_id,
828 u32 clock,
829 u32 ref_div,
830 u32 fb_div,
831 u32 frac_fb_div,
832 u32 post_div,
833 int bpc,
834 bool ss_enabled,
835 struct radeon_atom_ss *ss)
836{
837 struct drm_device *dev = crtc->dev;
838 struct radeon_device *rdev = dev->dev_private;
839 u8 frev, crev;
840 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
841 union set_pixel_clock args;
842
843 memset(&args, 0, sizeof(args));
844
845 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
846 &crev))
847 return;
848
849 switch (frev) {
850 case 1:
851 switch (crev) {
852 case 1:
853 if (clock == ATOM_DISABLE)
854 return;
855 args.v1.usPixelClock = cpu_to_le16(clock / 10);
856 args.v1.usRefDiv = cpu_to_le16(ref_div);
857 args.v1.usFbDiv = cpu_to_le16(fb_div);
858 args.v1.ucFracFbDiv = frac_fb_div;
859 args.v1.ucPostDiv = post_div;
860 args.v1.ucPpll = pll_id;
861 args.v1.ucCRTC = crtc_id;
862 args.v1.ucRefDivSrc = 1;
863 break;
864 case 2:
865 args.v2.usPixelClock = cpu_to_le16(clock / 10);
866 args.v2.usRefDiv = cpu_to_le16(ref_div);
867 args.v2.usFbDiv = cpu_to_le16(fb_div);
868 args.v2.ucFracFbDiv = frac_fb_div;
869 args.v2.ucPostDiv = post_div;
870 args.v2.ucPpll = pll_id;
871 args.v2.ucCRTC = crtc_id;
872 args.v2.ucRefDivSrc = 1;
873 break;
874 case 3:
875 args.v3.usPixelClock = cpu_to_le16(clock / 10);
876 args.v3.usRefDiv = cpu_to_le16(ref_div);
877 args.v3.usFbDiv = cpu_to_le16(fb_div);
878 args.v3.ucFracFbDiv = frac_fb_div;
879 args.v3.ucPostDiv = post_div;
880 args.v3.ucPpll = pll_id;
881 if (crtc_id == ATOM_CRTC2)
882 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
883 else
884 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
885 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
886 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
887 args.v3.ucTransmitterId = encoder_id;
888 args.v3.ucEncoderMode = encoder_mode;
889 break;
890 case 5:
891 args.v5.ucCRTC = crtc_id;
892 args.v5.usPixelClock = cpu_to_le16(clock / 10);
893 args.v5.ucRefDiv = ref_div;
894 args.v5.usFbDiv = cpu_to_le16(fb_div);
895 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
896 args.v5.ucPostDiv = post_div;
897 args.v5.ucMiscInfo = 0;
898 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
899 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
900 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
901 switch (bpc) {
902 case 8:
903 default:
904 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
905 break;
			case 10:
				/* the _32BPP define is the correct value for
				 * 10 bpc here despite the name
				 */
				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_32BPP;
				break;
			case 12:
				/* the _30BPP define is the correct value for
				 * 12 bpc here despite the name
				 */
				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
				break;
914 }
915 }
916 args.v5.ucTransmitterID = encoder_id;
917 args.v5.ucEncoderMode = encoder_mode;
918 args.v5.ucPpll = pll_id;
919 break;
920 case 6:
921 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
922 args.v6.ucRefDiv = ref_div;
923 args.v6.usFbDiv = cpu_to_le16(fb_div);
924 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
925 args.v6.ucPostDiv = post_div;
926 args.v6.ucMiscInfo = 0;
927 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
928 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
929 if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
930 switch (bpc) {
931 case 8:
932 default:
933 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
934 break;
935 case 10:
936 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP_V6;
937 break;
938 case 12:
939 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP_V6;
940 break;
941 case 16:
942 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
943 break;
944 }
945 }
946 args.v6.ucTransmitterID = encoder_id;
947 args.v6.ucEncoderMode = encoder_mode;
948 args.v6.ucPpll = pll_id;
949 break;
950 default:
951 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
952 return;
953 }
954 break;
955 default:
956 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
957 return;
958 }
959
960 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
961}
962
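/* Gather the per-mode PLL inputs: panel bpc, whether spread spectrum
 * applies for this encoder mode (DP/LVDS/DVI/HDMI) and the adjusted clock
 * from atombios_adjust_pll().
 */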
963static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
964{
965 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
966 struct drm_device *dev = crtc->dev;
967 struct radeon_device *rdev = dev->dev_private;
968 struct radeon_encoder *radeon_encoder =
969 to_radeon_encoder(radeon_crtc->encoder);
970 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
971
972 radeon_crtc->bpc = 8;
973 radeon_crtc->ss_enabled = false;
974
975 if (radeon_encoder->is_mst_encoder) {
976 radeon_dp_mst_prepare_pll(crtc, mode);
977 } else if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
978 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
979 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
980 struct drm_connector *connector =
981 radeon_get_connector_for_encoder(radeon_crtc->encoder);
982 struct radeon_connector *radeon_connector =
983 to_radeon_connector(connector);
984 struct radeon_connector_atom_dig *dig_connector =
985 radeon_connector->con_priv;
986 int dp_clock;
987
988
989 radeon_connector->pixelclock_for_modeset = mode->clock;
990 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
991
992 switch (encoder_mode) {
993 case ATOM_ENCODER_MODE_DP_MST:
994 case ATOM_ENCODER_MODE_DP:
995
996 dp_clock = dig_connector->dp_clock / 10;
997 if (ASIC_IS_DCE4(rdev))
998 radeon_crtc->ss_enabled =
999 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
1000 ASIC_INTERNAL_SS_ON_DP,
1001 dp_clock);
1002 else {
1003 if (dp_clock == 16200) {
1004 radeon_crtc->ss_enabled =
1005 radeon_atombios_get_ppll_ss_info(rdev,
1006 &radeon_crtc->ss,
1007 ATOM_DP_SS_ID2);
1008 if (!radeon_crtc->ss_enabled)
1009 radeon_crtc->ss_enabled =
1010 radeon_atombios_get_ppll_ss_info(rdev,
1011 &radeon_crtc->ss,
1012 ATOM_DP_SS_ID1);
1013 } else {
1014 radeon_crtc->ss_enabled =
1015 radeon_atombios_get_ppll_ss_info(rdev,
1016 &radeon_crtc->ss,
1017 ATOM_DP_SS_ID1);
1018 }
				/* disable spread spectrum on DCE3 DP */
				radeon_crtc->ss_enabled = false;
1021 }
1022 break;
1023 case ATOM_ENCODER_MODE_LVDS:
1024 if (ASIC_IS_DCE4(rdev))
1025 radeon_crtc->ss_enabled =
1026 radeon_atombios_get_asic_ss_info(rdev,
1027 &radeon_crtc->ss,
1028 dig->lcd_ss_id,
1029 mode->clock / 10);
1030 else
1031 radeon_crtc->ss_enabled =
1032 radeon_atombios_get_ppll_ss_info(rdev,
1033 &radeon_crtc->ss,
1034 dig->lcd_ss_id);
1035 break;
1036 case ATOM_ENCODER_MODE_DVI:
1037 if (ASIC_IS_DCE4(rdev))
1038 radeon_crtc->ss_enabled =
1039 radeon_atombios_get_asic_ss_info(rdev,
1040 &radeon_crtc->ss,
1041 ASIC_INTERNAL_SS_ON_TMDS,
1042 mode->clock / 10);
1043 break;
1044 case ATOM_ENCODER_MODE_HDMI:
1045 if (ASIC_IS_DCE4(rdev))
1046 radeon_crtc->ss_enabled =
1047 radeon_atombios_get_asic_ss_info(rdev,
1048 &radeon_crtc->ss,
1049 ASIC_INTERNAL_SS_ON_HDMI,
1050 mode->clock / 10);
1051 break;
1052 default:
1053 break;
1054 }
1055 }

	/* adjust pixel clock as needed */
	radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
1059
1060 return true;
1061}
1062
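/* Compute the dividers for the selected PPLL (AVIVO vs. legacy
 * algorithm), disable spread spectrum while reprogramming, program the
 * PLL and then recompute and re-enable SS if it was requested.
 */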
1063static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
1064{
1065 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1066 struct drm_device *dev = crtc->dev;
1067 struct radeon_device *rdev = dev->dev_private;
1068 struct radeon_encoder *radeon_encoder =
1069 to_radeon_encoder(radeon_crtc->encoder);
1070 u32 pll_clock = mode->clock;
1071 u32 clock = mode->clock;
1072 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
1073 struct radeon_pll *pll;
1074 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
1075
	/* for HDMI deep color on DCE5+, program the PLL with the adjusted
	 * (scaled) clock rather than the nominal mode clock
	 */
	if (ASIC_IS_DCE5(rdev) &&
	    (encoder_mode == ATOM_ENCODER_MODE_HDMI) &&
	    (radeon_crtc->bpc > 8))
		clock = radeon_crtc->adjusted_clock;
1081
1082 switch (radeon_crtc->pll_id) {
1083 case ATOM_PPLL1:
1084 pll = &rdev->clock.p1pll;
1085 break;
1086 case ATOM_PPLL2:
1087 pll = &rdev->clock.p2pll;
1088 break;
1089 case ATOM_DCPLL:
1090 case ATOM_PPLL_INVALID:
1091 default:
1092 pll = &rdev->clock.dcpll;
1093 break;
1094 }
1095
1096
1097 pll->flags = radeon_crtc->pll_flags;
1098 pll->reference_div = radeon_crtc->pll_reference_div;
1099 pll->post_div = radeon_crtc->pll_post_div;
1100
	if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
		/* TV seems to prefer the legacy algo on some boards */
		radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
					  &fb_div, &frac_fb_div, &ref_div, &post_div);
1105 else if (ASIC_IS_AVIVO(rdev))
1106 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1107 &fb_div, &frac_fb_div, &ref_div, &post_div);
1108 else
1109 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1110 &fb_div, &frac_fb_div, &ref_div, &post_div);
1111
1112 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1113 radeon_crtc->crtc_id, &radeon_crtc->ss);
1114
1115 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1116 encoder_mode, radeon_encoder->encoder_id, clock,
1117 ref_div, fb_div, frac_fb_div, post_div,
1118 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1119
	if (radeon_crtc->ss_enabled) {
		/* calculate ss amount and step size */
		if (ASIC_IS_DCE4(rdev)) {
			u32 step_size;
1124 u32 amount = (((fb_div * 10) + frac_fb_div) *
1125 (u32)radeon_crtc->ss.percentage) /
1126 (100 * (u32)radeon_crtc->ss.percentage_divider);
1127 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1128 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1129 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1130 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1131 step_size = (4 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1132 (125 * 25 * pll->reference_freq / 100);
1133 else
1134 step_size = (2 * amount * ref_div * ((u32)radeon_crtc->ss.rate * 2048)) /
1135 (125 * 25 * pll->reference_freq / 100);
1136 radeon_crtc->ss.step = step_size;
1137 }
1138
1139 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1140 radeon_crtc->crtc_id, &radeon_crtc->ss);
1141 }
1142}
1143
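/* Scanout setup for DCE4+ (Evergreen and newer): pin the fb BO, translate
 * the DRM format into GRPH_CONTROL bits (including tiling/bank config),
 * then program the surface addresses, pitch and viewport.
 */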
1144static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1145 struct drm_framebuffer *fb,
1146 int x, int y, int atomic)
1147{
1148 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1149 struct drm_device *dev = crtc->dev;
1150 struct radeon_device *rdev = dev->dev_private;
1151 struct drm_framebuffer *target_fb;
1152 struct drm_gem_object *obj;
1153 struct radeon_bo *rbo;
1154 uint64_t fb_location;
1155 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1156 unsigned bankw, bankh, mtaspect, tile_split;
1157 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1158 u32 tmp, viewport_w, viewport_h;
1159 int r;
1160 bool bypass_lut = false;
1161 struct drm_format_name_buf format_name;
1162
	/* no fb bound */
	if (!atomic && !crtc->primary->fb) {
1165 DRM_DEBUG_KMS("No FB bound\n");
1166 return 0;
1167 }
1168
1169 if (atomic)
1170 target_fb = fb;
1171 else
1172 target_fb = crtc->primary->fb;
1173
	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
	obj = target_fb->obj[0];
1178 rbo = gem_to_radeon_bo(obj);
1179 r = radeon_bo_reserve(rbo, false);
1180 if (unlikely(r != 0))
1181 return r;
1182
1183 if (atomic)
1184 fb_location = radeon_bo_gpu_offset(rbo);
1185 else {
1186 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1187 if (unlikely(r != 0)) {
1188 radeon_bo_unreserve(rbo);
1189 return -EINVAL;
1190 }
1191 }
1192
1193 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1194 radeon_bo_unreserve(rbo);
1195
1196 switch (target_fb->format->format) {
1197 case DRM_FORMAT_C8:
1198 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1199 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1200 break;
1201 case DRM_FORMAT_XRGB4444:
1202 case DRM_FORMAT_ARGB4444:
1203 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1204 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB4444));
1205#ifdef __BIG_ENDIAN
1206 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1207#endif
1208 break;
1209 case DRM_FORMAT_XRGB1555:
1210 case DRM_FORMAT_ARGB1555:
1211 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1212 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1213#ifdef __BIG_ENDIAN
1214 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1215#endif
1216 break;
1217 case DRM_FORMAT_BGRX5551:
1218 case DRM_FORMAT_BGRA5551:
1219 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1220 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA5551));
1221#ifdef __BIG_ENDIAN
1222 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1223#endif
1224 break;
1225 case DRM_FORMAT_RGB565:
1226 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1227 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1228#ifdef __BIG_ENDIAN
1229 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1230#endif
1231 break;
1232 case DRM_FORMAT_XRGB8888:
1233 case DRM_FORMAT_ARGB8888:
1234 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1235 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1236#ifdef __BIG_ENDIAN
1237 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1238#endif
1239 break;
1240 case DRM_FORMAT_XRGB2101010:
1241 case DRM_FORMAT_ARGB2101010:
1242 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1243 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB2101010));
1244#ifdef __BIG_ENDIAN
1245 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1246#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
		bypass_lut = true;
1249 break;
1250 case DRM_FORMAT_BGRX1010102:
1251 case DRM_FORMAT_BGRA1010102:
1252 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1253 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_BGRA1010102));
1254#ifdef __BIG_ENDIAN
1255 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1256#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
		bypass_lut = true;
1259 break;
1260 case DRM_FORMAT_XBGR8888:
1261 case DRM_FORMAT_ABGR8888:
1262 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1263 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1264 fb_swap = (EVERGREEN_GRPH_RED_CROSSBAR(EVERGREEN_GRPH_RED_SEL_B) |
1265 EVERGREEN_GRPH_BLUE_CROSSBAR(EVERGREEN_GRPH_BLUE_SEL_R));
1266#ifdef __BIG_ENDIAN
1267 fb_swap |= EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1268#endif
1269 break;
1270 default:
1271 DRM_ERROR("Unsupported screen format %s\n",
1272 drm_get_format_name(target_fb->format->format, &format_name));
1273 return -EINVAL;
1274 }
1275
1276 if (tiling_flags & RADEON_TILING_MACRO) {
1277 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);

		/* Set NUM_BANKS. */
		if (rdev->family >= CHIP_TAHITI) {
1281 unsigned index, num_banks;
1282
1283 if (rdev->family >= CHIP_BONAIRE) {
1284 unsigned tileb, tile_split_bytes;
1285
1286
1287 tile_split_bytes = 64 << tile_split;
1288 tileb = 8 * 8 * target_fb->format->cpp[0];
1289 tileb = min(tile_split_bytes, tileb);
1290
1291 for (index = 0; tileb > 64; index++)
1292 tileb >>= 1;
1293
1294 if (index >= 16) {
1295 DRM_ERROR("Wrong screen bpp (%u) or tile split (%u)\n",
1296 target_fb->format->cpp[0] * 8,
1297 tile_split);
1298 return -EINVAL;
1299 }
1300
1301 num_banks = (rdev->config.cik.macrotile_mode_array[index] >> 6) & 0x3;
1302 } else {
1303 switch (target_fb->format->cpp[0] * 8) {
1304 case 8:
1305 index = 10;
1306 break;
1307 case 16:
1308 index = SI_TILE_MODE_COLOR_2D_SCANOUT_16BPP;
1309 break;
1310 default:
1311 case 32:
1312 index = SI_TILE_MODE_COLOR_2D_SCANOUT_32BPP;
1313 break;
1314 }
1315
1316 num_banks = (rdev->config.si.tile_mode_array[index] >> 20) & 0x3;
1317 }
1318
1319 fb_format |= EVERGREEN_GRPH_NUM_BANKS(num_banks);
1320 } else {
1321
1322 if (rdev->family >= CHIP_CAYMAN)
1323 tmp = rdev->config.cayman.tile_config;
1324 else
1325 tmp = rdev->config.evergreen.tile_config;
1326
1327 switch ((tmp & 0xf0) >> 4) {
1328 case 0:
1329 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1330 break;
1331 case 1:
1332 default:
1333 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1334 break;
1335 case 2:
1336 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1337 break;
1338 }
1339 }
1340
1341 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1342 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1343 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1344 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1345 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1346 if (rdev->family >= CHIP_BONAIRE) {
1347
1348 fb_format |= CIK_GRPH_MICRO_TILE_MODE(CIK_DISPLAY_MICRO_TILING);
1349 }
1350 } else if (tiling_flags & RADEON_TILING_MICRO)
1351 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1352
1353 if (rdev->family >= CHIP_BONAIRE) {
		/* Read the pipe config from the 2D TILED SCANOUT mode.
		 * It should be the same for the other modes too, but not all
		 * modes set the pipe config.
		 */
		u32 pipe_config = (rdev->config.cik.tile_mode_array[10] >> 6) & 0x1f;
1358
1359 fb_format |= CIK_GRPH_PIPE_CONFIG(pipe_config);
1360 } else if ((rdev->family == CHIP_TAHITI) ||
1361 (rdev->family == CHIP_PITCAIRN))
1362 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1363 else if ((rdev->family == CHIP_VERDE) ||
1364 (rdev->family == CHIP_OLAND) ||
1365 (rdev->family == CHIP_HAINAN))
1366 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1367
1368 switch (radeon_crtc->crtc_id) {
1369 case 0:
1370 WREG32(AVIVO_D1VGA_CONTROL, 0);
1371 break;
1372 case 1:
1373 WREG32(AVIVO_D2VGA_CONTROL, 0);
1374 break;
1375 case 2:
1376 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1377 break;
1378 case 3:
1379 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1380 break;
1381 case 4:
1382 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1383 break;
1384 case 5:
1385 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1386 break;
1387 default:
1388 break;
1389 }
1390

	/* Make sure the surface address is updated at vertical blank rather
	 * than horizontal blank
	 */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1395
1396 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1397 upper_32_bits(fb_location));
1398 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1399 upper_32_bits(fb_location));
1400 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1401 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1402 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1403 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1404 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1405 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);

	/*
	 * The LUT only has 256 slots for indexing by a 8 bpc fb. Bypass the
	 * LUT for > 8 bpc scanout to avoid truncation of fb indices to 8 msbs
	 * and thus retain the full precision throughout the pipeline.
	 */
	WREG32_P(EVERGREEN_GRPH_LUT_10BIT_BYPASS_CONTROL + radeon_crtc->crtc_offset,
		 (bypass_lut ? EVERGREEN_LUT_10BIT_BYPASS_EN : 0),
		 ~EVERGREEN_LUT_10BIT_BYPASS_EN);
1415
1416 if (bypass_lut)
1417 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1418
1419 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1420 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1421 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1422 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1423 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1424 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1425
1426 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1427 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1428 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1429
1430 if (rdev->family >= CHIP_BONAIRE)
1431 WREG32(CIK_LB_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1432 target_fb->height);
1433 else
1434 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1435 target_fb->height);
1436 x &= ~3;
1437 y &= ~1;
1438 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1439 (x << 16) | y);
1440 viewport_w = crtc->mode.hdisplay;
1441 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1442 if ((rdev->family >= CHIP_BONAIRE) &&
1443 (crtc->mode.flags & DRM_MODE_FLAG_INTERLACE))
1444 viewport_h *= 2;
1445 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1446 (viewport_w << 16) | viewport_h);

	/* set pageflip to happen anywhere in the vblank interval */
	WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1450
1451 if (!atomic && fb && fb != crtc->primary->fb) {
1452 rbo = gem_to_radeon_bo(fb->obj[0]);
1453 r = radeon_bo_reserve(rbo, false);
1454 if (unlikely(r != 0))
1455 return r;
1456 radeon_bo_unpin(rbo);
1457 radeon_bo_unreserve(rbo);
1458 }

	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);
1462
1463 return 0;
1464}
1465
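/* AVIVO (R5xx-R7xx) variant of the scanout setup; same flow as the DCE4
 * path above but using the D1GRPH register layout.
 */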
1466static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1467 struct drm_framebuffer *fb,
1468 int x, int y, int atomic)
1469{
1470 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1471 struct drm_device *dev = crtc->dev;
1472 struct radeon_device *rdev = dev->dev_private;
1473 struct drm_gem_object *obj;
1474 struct radeon_bo *rbo;
1475 struct drm_framebuffer *target_fb;
1476 uint64_t fb_location;
1477 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1478 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1479 u32 viewport_w, viewport_h;
1480 int r;
1481 bool bypass_lut = false;
1482 struct drm_format_name_buf format_name;
1483
	/* no fb bound */
	if (!atomic && !crtc->primary->fb) {
1486 DRM_DEBUG_KMS("No FB bound\n");
1487 return 0;
1488 }
1489
1490 if (atomic)
1491 target_fb = fb;
1492 else
1493 target_fb = crtc->primary->fb;
1494
1495 obj = target_fb->obj[0];
1496 rbo = gem_to_radeon_bo(obj);
1497 r = radeon_bo_reserve(rbo, false);
1498 if (unlikely(r != 0))
1499 return r;
1500
	/* If atomic, assume fb object is pinned & idle & fenced and
	 * just update base pointers
	 */
	if (atomic)
1505 fb_location = radeon_bo_gpu_offset(rbo);
1506 else {
1507 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1508 if (unlikely(r != 0)) {
1509 radeon_bo_unreserve(rbo);
1510 return -EINVAL;
1511 }
1512 }
1513 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1514 radeon_bo_unreserve(rbo);
1515
1516 switch (target_fb->format->format) {
1517 case DRM_FORMAT_C8:
1518 fb_format =
1519 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1520 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1521 break;
1522 case DRM_FORMAT_XRGB4444:
1523 case DRM_FORMAT_ARGB4444:
1524 fb_format =
1525 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1526 AVIVO_D1GRPH_CONTROL_16BPP_ARGB4444;
1527#ifdef __BIG_ENDIAN
1528 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1529#endif
1530 break;
1531 case DRM_FORMAT_XRGB1555:
1532 fb_format =
1533 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1534 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1535#ifdef __BIG_ENDIAN
1536 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1537#endif
1538 break;
1539 case DRM_FORMAT_RGB565:
1540 fb_format =
1541 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1542 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1543#ifdef __BIG_ENDIAN
1544 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1545#endif
1546 break;
1547 case DRM_FORMAT_XRGB8888:
1548 case DRM_FORMAT_ARGB8888:
1549 fb_format =
1550 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1551 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1552#ifdef __BIG_ENDIAN
1553 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1554#endif
1555 break;
1556 case DRM_FORMAT_XRGB2101010:
1557 case DRM_FORMAT_ARGB2101010:
1558 fb_format =
1559 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1560 AVIVO_D1GRPH_CONTROL_32BPP_ARGB2101010;
1561#ifdef __BIG_ENDIAN
1562 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1563#endif
		/* Greater 8 bpc fb needs to bypass hw-lut to retain precision */
		bypass_lut = true;
1566 break;
1567 case DRM_FORMAT_XBGR8888:
1568 case DRM_FORMAT_ABGR8888:
1569 fb_format =
1570 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1571 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1572 if (rdev->family >= CHIP_R600)
1573 fb_swap =
1574 (R600_D1GRPH_RED_CROSSBAR(R600_D1GRPH_RED_SEL_B) |
1575 R600_D1GRPH_BLUE_CROSSBAR(R600_D1GRPH_BLUE_SEL_R));
1576 else
1577 fb_format |= AVIVO_D1GRPH_SWAP_RB;
1578#ifdef __BIG_ENDIAN
1579 fb_swap |= R600_D1GRPH_SWAP_ENDIAN_32BIT;
1580#endif
1581 break;
1582 default:
1583 DRM_ERROR("Unsupported screen format %s\n",
1584 drm_get_format_name(target_fb->format->format, &format_name));
1585 return -EINVAL;
1586 }
1587
1588 if (rdev->family >= CHIP_R600) {
1589 if (tiling_flags & RADEON_TILING_MACRO)
1590 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1591 else if (tiling_flags & RADEON_TILING_MICRO)
1592 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1593 } else {
1594 if (tiling_flags & RADEON_TILING_MACRO)
1595 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1596
1597 if (tiling_flags & RADEON_TILING_MICRO)
1598 fb_format |= AVIVO_D1GRPH_TILED;
1599 }
1600
1601 if (radeon_crtc->crtc_id == 0)
1602 WREG32(AVIVO_D1VGA_CONTROL, 0);
1603 else
1604 WREG32(AVIVO_D2VGA_CONTROL, 0);
1605

	/* Make sure the surface address is updated at vertical blank rather
	 * than horizontal blank
	 */
	WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, 0);
1610
1611 if (rdev->family >= CHIP_RV770) {
1612 if (radeon_crtc->crtc_id) {
1613 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1614 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1615 } else {
1616 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1617 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1618 }
1619 }
1620 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1621 (u32) fb_location);
1622 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1623 radeon_crtc->crtc_offset, (u32) fb_location);
1624 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1625 if (rdev->family >= CHIP_R600)
1626 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1627
1628
1629 WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset,
1630 (bypass_lut ? AVIVO_LUT_10BIT_BYPASS_EN : 0), ~AVIVO_LUT_10BIT_BYPASS_EN);
1631
1632 if (bypass_lut)
1633 DRM_DEBUG_KMS("Bypassing hardware LUT due to 10 bit fb scanout.\n");
1634
1635 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1636 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1637 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1638 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1639 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1640 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1641
1642 fb_pitch_pixels = target_fb->pitches[0] / target_fb->format->cpp[0];
1643 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1644 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1645
1646 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1647 target_fb->height);
1648 x &= ~3;
1649 y &= ~1;
1650 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1651 (x << 16) | y);
1652 viewport_w = crtc->mode.hdisplay;
1653 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1654 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1655 (viewport_w << 16) | viewport_h);
1656
1657
1658 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 3);
1659
1660 if (!atomic && fb && fb != crtc->primary->fb) {
1661 rbo = gem_to_radeon_bo(fb->obj[0]);
1662 r = radeon_bo_reserve(rbo, false);
1663 if (unlikely(r != 0))
1664 return r;
1665 radeon_bo_unpin(rbo);
1666 radeon_bo_unreserve(rbo);
1667 }
1668
	/* Bytes per pixel may have changed */
	radeon_bandwidth_update(rdev);
1671
1672 return 0;
1673}
1674
1675int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1676 struct drm_framebuffer *old_fb)
1677{
1678 struct drm_device *dev = crtc->dev;
1679 struct radeon_device *rdev = dev->dev_private;
1680
1681 if (ASIC_IS_DCE4(rdev))
1682 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1683 else if (ASIC_IS_AVIVO(rdev))
1684 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1685 else
1686 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1687}
1688
1689int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1690 struct drm_framebuffer *fb,
1691 int x, int y, enum mode_set_atomic state)
1692{
1693 struct drm_device *dev = crtc->dev;
1694 struct radeon_device *rdev = dev->dev_private;
1695
1696 if (ASIC_IS_DCE4(rdev))
1697 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1698 else if (ASIC_IS_AVIVO(rdev))
1699 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1700 else
1701 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1702}
1703
1704
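/* Fixup for pre-AVIVO asics driven through atombios: clear the RGB offset
 * enable in the display merge control and, for CRTC2, mirror the sync
 * start/width values into the FP2 registers.
 */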
1705static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1706{
1707 struct drm_device *dev = crtc->dev;
1708 struct radeon_device *rdev = dev->dev_private;
1709 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1710 u32 disp_merge_cntl;
1711
1712 switch (radeon_crtc->crtc_id) {
1713 case 0:
1714 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1715 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1716 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1717 break;
1718 case 1:
1719 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1720 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1721 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1722 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1723 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1724 break;
1725 }
1726}
1727
/**
 * radeon_get_pll_use_mask - look up a mask of which pplls are in use
 *
 * @crtc: drm crtc
 *
 * Returns the mask of which PPLLs (Pixel PLLs) are in use.
 */
static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1736{
1737 struct drm_device *dev = crtc->dev;
1738 struct drm_crtc *test_crtc;
1739 struct radeon_crtc *test_radeon_crtc;
1740 u32 pll_in_use = 0;
1741
1742 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1743 if (crtc == test_crtc)
1744 continue;
1745
1746 test_radeon_crtc = to_radeon_crtc(test_crtc);
1747 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1748 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1749 }
1750 return pll_in_use;
1751}
1752
/**
 * radeon_get_shared_dp_ppll - return the PPLL used by another crtc for DP
 *
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) used by another crtc/encoder which is
 * also in DP mode.  For DP, the same PPLL can be shared by all DP
 * crtcs/encoders.
 */
static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1763{
1764 struct drm_device *dev = crtc->dev;
1765 struct radeon_device *rdev = dev->dev_private;
1766 struct drm_crtc *test_crtc;
1767 struct radeon_crtc *test_radeon_crtc;
1768
1769 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1770 if (crtc == test_crtc)
1771 continue;
1772 test_radeon_crtc = to_radeon_crtc(test_crtc);
1773 if (test_radeon_crtc->encoder &&
1774 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
			/* PPLL2 is exclusive to UNIPHYA on DCE61 */
			if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
			    test_radeon_crtc->pll_id == ATOM_PPLL2)
				continue;
			/* for DP use the same PLL for all */
			if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1781 return test_radeon_crtc->pll_id;
1782 }
1783 }
1784 return ATOM_PPLL_INVALID;
1785}
1786
/**
 * radeon_get_shared_nondp_ppll - return the PPLL used by another non-DP crtc
 *
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) used by another non-DP crtc/encoder that
 * runs at the same clock (and SS setting) so the PLL can be shared.
 */
static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
1797{
1798 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1799 struct drm_device *dev = crtc->dev;
1800 struct radeon_device *rdev = dev->dev_private;
1801 struct drm_crtc *test_crtc;
1802 struct radeon_crtc *test_radeon_crtc;
1803 u32 adjusted_clock, test_adjusted_clock;
1804
1805 adjusted_clock = radeon_crtc->adjusted_clock;
1806
1807 if (adjusted_clock == 0)
1808 return ATOM_PPLL_INVALID;
1809
1810 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1811 if (crtc == test_crtc)
1812 continue;
1813 test_radeon_crtc = to_radeon_crtc(test_crtc);
1814 if (test_radeon_crtc->encoder &&
1815 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1816
1817 if (ASIC_IS_DCE61(rdev) && !ASIC_IS_DCE8(rdev) &&
1818 test_radeon_crtc->pll_id == ATOM_PPLL2)
1819 continue;
1820
1821 if (test_radeon_crtc->connector == radeon_crtc->connector) {
1822
1823 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1824 return test_radeon_crtc->pll_id;
1825 }
1826
1827 test_adjusted_clock = test_radeon_crtc->adjusted_clock;
1828 if ((crtc->mode.clock == test_crtc->mode.clock) &&
1829 (adjusted_clock == test_adjusted_clock) &&
1830 (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
1831 (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
1832 return test_radeon_crtc->pll_id;
1833 }
1834 }
1835 return ATOM_PPLL_INVALID;
1836}
1837
1838
1839
1840
1841
1842
1843
1844
1845
1846
1847
1848
1849
1850
1851
1852
1853
1854
1855
1856
1857
1858
1859
1860
1861
1862
1863
1864
1865
1866
1867
1868
1869
1870
1871
1872
1873
1874
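
/**
 * radeon_atom_pick_pll - allocate a PPLL for use by the crtc
 * @crtc: drm crtc
 *
 * Returns the PPLL (Pixel PLL) to be used by the crtc.  DP outputs can share
 * a single PPLL, or skip PPLL programming entirely (ATOM_PPLL_INVALID) when
 * an external DP reference clock is available; non-DP outputs get their own
 * PPLL unless another crtc already runs with identical clock settings.
 *
 * PLL availability as implemented below:
 * - DCE 8.x: KABINI/MULLINS use PPLL1/PPLL2, other DCE8 parts also PPLL0
 * - DCE 6.1: PPLL2 is reserved for UNIPHY link A, the rest use PPLL0/PPLL1
 * - DCE 4.1: PPLL1/PPLL2, no sharing
 * - DCE 4.0/5.0/6.0: PPLL1/PPLL2 (DP uses PPLL0 on DCE6, DCPLL on DCE5)
 * - pre-DCE4: the crtc to pll mapping is hardcoded (pll id == crtc id)
 */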
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	u32 pll_in_use;
	int pll;

	if (ASIC_IS_DCE8(rdev)) {
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* otherwise, pick one of the free plls */
		if ((rdev->family == CHIP_KABINI) ||
		    (rdev->family == CHIP_MULLINS)) {
			/* KB/ML only have PPLL1 and PPLL2 */
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		} else {
			/* the rest of DCE8 also has PPLL0 */
			pll_in_use = radeon_get_pll_use_mask(crtc);
			if (!(pll_in_use & (1 << ATOM_PPLL2)))
				return ATOM_PPLL2;
			if (!(pll_in_use & (1 << ATOM_PPLL1)))
				return ATOM_PPLL1;
			if (!(pll_in_use & (1 << ATOM_PPLL0)))
				return ATOM_PPLL0;
			DRM_ERROR("unable to allocate a PPLL\n");
			return ATOM_PPLL_INVALID;
		}
	} else if (ASIC_IS_DCE61(rdev)) {
		struct radeon_encoder_atom_dig *dig =
			radeon_encoder->enc_priv;

		if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
		    (dig->linkb == false))
			/* UNIPHY link A always gets PPLL2 */
			return ATOM_PPLL2;
		else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* otherwise pick a free pll from PPLL0/PPLL1 */
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL0)))
			return ATOM_PPLL0;
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE41(rdev)) {
		/* don't share PLLs on DCE4.1 chips */
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
		}
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else if (ASIC_IS_DCE4(rdev)) {
		/* for DP the reference can come from an external clock, PPLL0
		 * (DCE6) or DCPLL (DCE5); in those cases PPLL programming is
		 * skipped or handled by the dedicated PLL, otherwise try to
		 * share a PPLL with another DP crtc.
		 */
		if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
			if (rdev->clock.dp_extclk)
				/* skip PPLL programming if using ext clock */
				return ATOM_PPLL_INVALID;
			else if (ASIC_IS_DCE6(rdev))
				/* use PPLL0 for all DP */
				return ATOM_PPLL0;
			else if (ASIC_IS_DCE5(rdev))
				/* use DCPLL for all DP */
				return ATOM_DCPLL;
			else {
				/* use the same PPLL for all DP monitors */
				pll = radeon_get_shared_dp_ppll(crtc);
				if (pll != ATOM_PPLL_INVALID)
					return pll;
			}
		} else {
			/* use the same PPLL for all monitors with the same clock */
			pll = radeon_get_shared_nondp_ppll(crtc);
			if (pll != ATOM_PPLL_INVALID)
				return pll;
		}
		/* all other cases: pick a free pll */
		pll_in_use = radeon_get_pll_use_mask(crtc);
		if (!(pll_in_use & (1 << ATOM_PPLL1)))
			return ATOM_PPLL1;
		if (!(pll_in_use & (1 << ATOM_PPLL2)))
			return ATOM_PPLL2;
		DRM_ERROR("unable to allocate a PPLL\n");
		return ATOM_PPLL_INVALID;
	} else {
		/* on pre-DCE4 asics the crtc to pll mapping is hardcoded,
		 * so just use the pll with the same index as the crtc
		 */
		return radeon_crtc->crtc_id;
	}
}
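
/*
 * Program the display engine PLL (DCPLL/display clock) to the default
 * dispclk from the BIOS, toggling DCPLL spread spectrum around the update
 * on DCE4/DCE5 when the BIOS provides SS info.
 */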
void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
{
	if (ASIC_IS_DCE6(rdev))
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
	else if (ASIC_IS_DCE4(rdev)) {
		struct radeon_atom_ss ss;
		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
								   ASIC_INTERNAL_SS_ON_DCPLL,
								   rdev->clock.default_dispclk);
		/* disable spread spectrum on the DCPLL while it is reprogrammed */
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
		atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
		if (ss_enabled)
			atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
	}
}
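
/*
 * Full legacy (non-atomic) modeset for the crtc: program the pixel PLL and
 * crtc timing, set the scanout base, then apply overscan, scaler and cursor
 * state.  The PLL must have been picked in mode_fixup beforehand.
 */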
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder =
		to_radeon_encoder(radeon_crtc->encoder);
	bool is_tvcv = false;

	if (radeon_encoder->active_device &
	    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
		is_tvcv = true;

	if (!radeon_crtc->adjusted_clock)
		return -EINVAL;

	atombios_crtc_set_pll(crtc, adjusted_mode);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	radeon_cursor_reset(crtc);
	/* keep a copy of the programmed mode for the rest of the driver */
	radeon_crtc->hw_mode = *adjusted_mode;

	return 0;
}
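
/*
 * Bind the crtc to its encoder/connector, run scaling and PLL preparation,
 * and pick a PPLL for the mode.  Failing to get a PPLL is only fatal for
 * non-DP outputs, since DP can run without one (external reference clock).
 */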
static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
				     const struct drm_display_mode *mode,
				     struct drm_display_mode *adjusted_mode)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;

	/* cache the encoder/connector driving this crtc */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			radeon_crtc->encoder = encoder;
			radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
			break;
		}
	}
	if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
		radeon_crtc->encoder = NULL;
		radeon_crtc->connector = NULL;
		return false;
	}
	if (radeon_crtc->encoder) {
		struct radeon_encoder *radeon_encoder =
			to_radeon_encoder(radeon_crtc->encoder);

		radeon_crtc->output_csc = radeon_encoder->output_csc;
	}
	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
		return false;
	if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
		return false;
	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
	/* if we can't get a PPLL for a non-DP output, fail */
	if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
	    !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
		return false;

	return true;
}
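
/*
 * Called before a mode set: make sure the crtc is not power gated, then lock
 * it and turn it off while it is reprogrammed.
 */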
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	/* disable crtc power gating before programming (DCE6+) */
	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_DISABLE);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
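
/* re-enable the crtc and release the lock once the mode set is complete */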
static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}
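
/*
 * Fully shut the crtc down: dpms off, unpin the scanout buffer, disable the
 * graphics plane, power gate the crtc (DCE6+) and tear down its PPLL unless
 * another enabled crtc is still using the same PLL.
 */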
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_atom_ss ss;
	int i;

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
	if (crtc->primary->fb) {
		int r;
		struct radeon_bo *rbo;

		rbo = gem_to_radeon_bo(crtc->primary->fb->obj[0]);
		r = radeon_bo_reserve(rbo, false);
		if (unlikely(r))
			DRM_ERROR("failed to reserve rbo before unpin\n");
		else {
			radeon_bo_unpin(rbo);
			radeon_bo_unreserve(rbo);
		}
	}
	/* disable the graphics (scanout) block */
	if (ASIC_IS_DCE4(rdev))
		WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 0);
	else if (ASIC_IS_AVIVO(rdev))
		WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 0);

	if (ASIC_IS_DCE6(rdev))
		atombios_powergate_crtc(crtc, ATOM_ENABLE);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i] &&
		    rdev->mode_info.crtcs[i]->enabled &&
		    i != radeon_crtc->crtc_id &&
		    radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
			/* another enabled crtc is using this pll, don't turn it off */
			goto done;
		}
	}

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		/* disable the ppll */
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	case ATOM_PPLL0:
		/* only disable PPLL0 on families where it is a regular per-crtc pll */
		if ((rdev->family == CHIP_ARUBA) ||
		    (rdev->family == CHIP_KAVERI) ||
		    (rdev->family == CHIP_BONAIRE) ||
		    (rdev->family == CHIP_HAWAII))
			atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
						  0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
		break;
	default:
		break;
	}
done:
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
}
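
/* crtc helper vtable used for all AtomBIOS-based crtcs */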
static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.mode_set_base_atomic = atombios_crtc_set_base_atomic,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.disable = atombios_crtc_disable,
};
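
/*
 * Per-crtc init for AtomBIOS based chips: set the register offset for this
 * crtc, reset the PLL/encoder/connector bookkeeping and hook up the
 * AtomBIOS crtc helper functions.
 */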
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = ATOM_PPLL_INVALID;
	radeon_crtc->adjusted_clock = 0;
	radeon_crtc->encoder = NULL;
	radeon_crtc->connector = NULL;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}